pgpack-dumper 0.2.1.1__cp313-cp313-macosx_11_0_arm64.whl → 0.3.0.0__cp313-cp313-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,13 @@
1
+ """Common functions and classes."""
2
+
3
+ from .columns import make_columns
1
4
  from .connector import PGConnector
2
5
  from .copy import CopyBuffer
6
+ from .diagram import (
7
+ DBMetadata,
8
+ format_table,
9
+ transfer_diagram,
10
+ )
3
11
  from .errors import (
4
12
  CopyBufferError,
5
13
  CopyBufferObjectError,
@@ -29,6 +37,7 @@ __all__ = (
29
37
  "CopyBufferObjectError",
30
38
  "CopyBufferTableNotDefined",
31
39
  "CopyReader",
40
+ "DBMetadata",
32
41
  "DumperLogger",
33
42
  "PGConnector",
34
43
  "PGObject",
@@ -38,9 +47,12 @@ __all__ = (
38
47
  "PGPackDumperWriteError",
39
48
  "StreamReader",
40
49
  "chunk_query",
50
+ "format_table",
51
+ "make_columns",
41
52
  "query_path",
42
53
  "query_template",
43
54
  "random_name",
44
55
  "read_metadata",
45
56
  "search_object",
57
+ "transfer_diagram",
46
58
  )
@@ -0,0 +1,30 @@
1
+ from collections import OrderedDict
2
+
3
+ from pgcopylib import PGOid
4
+ from pgpack.common import PGParam
5
+
6
+
7
def make_columns(
    list_columns: list[str],
    pgtypes: list[PGOid],
    pgparam: list[PGParam],
) -> OrderedDict[str, str]:
    """Build the DBMetadata.columns mapping of column name -> type string.

    bpchar is rendered with its length and numeric with its
    (length, scale); every other type is rendered by its bare
    PGOid name.
    """

    def render(pgtype: PGOid, param: PGParam) -> str:
        # Only fixed-width char and numeric carry display parameters.
        if pgtype is PGOid.bpchar:
            return f"{pgtype.name}({param.length})"
        if pgtype is PGOid.numeric:
            return f"{pgtype.name}({param.length}, {param.scale})"
        return pgtype.name

    return OrderedDict(
        (col_name, render(pgtype, param))
        for col_name, pgtype, param in zip(list_columns, pgtypes, pgparam)
    )
@@ -0,0 +1,78 @@
1
+ from collections import OrderedDict
2
+ from typing import NamedTuple
3
+
4
+
5
class DBMetadata(NamedTuple):
    """Database object.

    Describes one endpoint of a transfer (shown in the log diagram):
    a database, a file, or an in-memory frame, plus its column layout.
    """

    # Endpoint identifier, e.g. "postgres", "greenplum", "file",
    # "pandas", "polars" or "python" (values used elsewhere in this package).
    name: str
    # Human-readable detail: server version, file name, or a label such
    # as "DataFrame" / "iterable object" for in-memory sources.
    version: str
    # Ordered mapping of column name -> rendered type string,
    # typically produced by make_columns().
    columns: OrderedDict
11
+
12
+
13
def truncate_text(text: str, max_length: int) -> str:
    """Shorten *text* to at most *max_length* characters.

    Text that already fits is returned unchanged; longer text is cut
    to max_length - 1 characters with a trailing ellipsis appended, so
    the result is exactly max_length characters long.
    """

    if len(text) <= max_length:
        return text
    return f"{text[:max_length - 1]}…"
19
+
20
+
21
def format_table(
    metadata: DBMetadata,
    direction: str,
    table_width: int = 51,
) -> list[str]:
    """Format single table as list of lines.

    Renders *metadata* as a two-column box-drawing table titled
    "<direction> [<name> <version>]".  Column widths are derived from
    *table_width* (the original hard-coded them to 25, which only
    lined up with the default table_width == 51).

    :param metadata: endpoint description whose columns are listed.
    :param direction: title prefix, e.g. "Source" or "Destination".
    :param table_width: inner width of the title row; each of the two
        data columns gets (table_width - 1) // 2 characters.
    :return: list of rendered lines (no trailing newlines).
    """

    # Per-column width including cell padding; 25 for the default 51.
    col_width = (table_width - 1) // 2
    # Text width inside one cell (two padding spaces removed).
    inner = col_width - 2
    # Text width of the title row (matches two columns + separator).
    title_width = 2 * col_width

    lines = []

    title = f"{direction} [{metadata.name} {metadata.version}]"
    lines.append(f"┌{'─' * (title_width + 1)}┐")
    lines.append(
        f"│ {truncate_text(title, title_width).ljust(title_width)}│"
    )
    lines.append(f"╞{'═' * col_width}╤{'═' * col_width}╡")
    lines.append(
        f"│ {'Column Name'.ljust(inner)} │ {'Data Type'.ljust(inner)} │"
    )
    lines.append(f"╞{'═' * col_width}╪{'═' * col_width}╡")

    for i, (col_name, col_type) in enumerate(metadata.columns.items()):
        truncated_name = truncate_text(col_name, inner)
        truncated_type = truncate_text(str(col_type), inner)
        lines.append(
            f"│ {truncated_name.ljust(inner)} │ {truncated_type.ljust(inner)} │"
        )
        # Row separator between data rows, but not after the last one.
        if i < len(metadata.columns) - 1:
            lines.append(f"├{'─' * col_width}┼{'─' * col_width}┤")

    lines.append(f"└{'─' * col_width}┴{'─' * col_width}┘")
    return lines
50
+
51
+
52
def transfer_diagram(source: DBMetadata, destination: DBMetadata) -> str:
    """Make transfer diagram with two tables and arrow.

    Renders *source* and *destination* side by side (via format_table)
    with an ASCII-art arrow between them, for logging before a copy.

    :param source: metadata of the endpoint data is read from.
    :param destination: metadata of the endpoint data is written to.
    :return: multi-line string starting with "Transfer data diagram:".
    """

    src_lines = format_table(source, "Source")
    dest_lines = format_table(destination, "Destination")
    # At least 9 rows so the 8-row arrow below always fits.
    max_lines = max(len(src_lines), len(dest_lines), 9)

    # Pad the shorter table with blank lines of the rendered table
    # width — presumably 53 = default table_width 51 + 2 border
    # characters; TODO confirm if table width ever changes.
    src_lines.extend([" " * 53] * (max_lines - len(src_lines)))
    dest_lines.extend([" " * 53] * (max_lines - len(dest_lines)))

    middle_line = max_lines // 2
    # Arrow rows keyed by diagram row index, centred on middle_line.
    arrow_config = [
        (middle_line - 3, " │╲ "),
        (middle_line - 2, " │ ╲ "),
        (middle_line - 1, "┌┘ ╲ "),
        (middle_line, "│ ╲"),
        (middle_line + 1, "│ ╱"),
        (middle_line + 2, "└┐ ╱ "),
        (middle_line + 3, " │ ╱ "),
        (middle_line + 4, " │╱ "),
    ]
    arrow_map = {line: arrow for line, arrow in arrow_config}

    # Rows without an arrow segment fall back to plain spacing.
    return "Transfer data diagram:\n" + "\n".join(
        f"{src_lines[row]} {arrow_map.get(row, ' ')} {dest_lines[row]}"
        for row in range(max_lines)
    )
@@ -0,0 +1,2 @@
1
+ select case when count(nspname) = 1 then 'greenplum' else 'postgres' end as dbname
2
+ from pg_catalog.pg_namespace where nspname = 'gp_toolkit';
@@ -0,0 +1 @@
1
+ -- Extract the Greenplum release number from the version() banner.
+ select substring(version() from 'Greenplum Database (.*?) build') as gp_version;
pgpack_dumper/dumper.py CHANGED
@@ -1,3 +1,5 @@
1
+ from collections import OrderedDict
2
+ from collections.abc import Generator
1
3
  from io import (
2
4
  BufferedReader,
3
5
  BufferedWriter,
@@ -7,7 +9,8 @@ from types import MethodType
7
9
  from typing import (
8
10
  Any,
9
11
  Iterable,
10
- Union
12
+ Iterator,
13
+ Union,
11
14
  )
12
15
 
13
16
  from pgcopylib import PGCopyWriter
@@ -19,6 +22,7 @@ from pgpack import (
19
22
  )
20
23
  from psycopg import (
21
24
  Connection,
25
+ Copy,
22
26
  Cursor,
23
27
  )
24
28
  from pandas import DataFrame as PdFrame
@@ -27,6 +31,7 @@ from sqlparse import format as sql_format
27
31
 
28
32
  from .common import (
29
33
  CopyBuffer,
34
+ DBMetadata,
30
35
  DumperLogger,
31
36
  PGConnector,
32
37
  PGPackDumperError,
@@ -35,6 +40,9 @@ from .common import (
35
40
  PGPackDumperWriteBetweenError,
36
41
  StreamReader,
37
42
  chunk_query,
43
+ make_columns,
44
+ query_template,
45
+ transfer_diagram,
38
46
  )
39
47
 
40
48
 
@@ -61,14 +69,24 @@ class PGPackDumper:
61
69
  )
62
70
  self.cursor: Cursor = self.connect.cursor()
63
71
  self.copy_buffer: CopyBuffer = CopyBuffer(self.cursor, self.logger)
72
+ self._dbmeta: DBMetadata | None = None
73
+ self._size = 0
64
74
  except Exception as error:
65
75
  self.logger.error(f"{error.__class__.__name__}: {error}")
66
76
  raise PGPackDumperError(error)
67
77
 
78
+ self.cursor.execute(query_template("dbname"))
79
+ self.dbname = self.cursor.fetchone()[0]
68
80
  self.version = (
69
81
  f"{self.connect.info.server_version // 10000}."
70
82
  f"{self.connect.info.server_version % 1000}"
71
83
  )
84
+
85
+ if self.dbname == "greenplum":
86
+ self.cursor.execute(query_template("gpversion"))
87
+ gpversion = self.cursor.fetchone()[0]
88
+ self.version = f"{self.version} gp {gpversion}"
89
+
72
90
  self.logger.info(
73
91
  f"PGPackDumper initialized for host {self.connector.host}"
74
92
  f"[version {self.version}]"
@@ -84,7 +102,7 @@ class PGPackDumper:
84
102
  second_part: list[str]
85
103
 
86
104
  self: PGPackDumper = args[0]
87
- cursor: Cursor = kwargs.get("dumper_src", self).cursor
105
+ cursor: Cursor = (kwargs.get("dumper_src") or self).cursor
88
106
  query: str = kwargs.get("query_src") or kwargs.get("query")
89
107
  part: int = 1
90
108
  first_part, second_part = chunk_query(self.query_formatter(query))
@@ -138,21 +156,45 @@ class PGPackDumper:
138
156
  ) -> bool:
139
157
  """Internal method read_dump for generate kwargs to decorator."""
140
158
 
159
+ def __read_data(
160
+ copy_to: Iterator[Copy],
161
+ ) -> Generator[bytes, None, None]:
162
+ """Generate bytes from copy object with calc size."""
163
+
164
+ self._size = 0
165
+
166
+ for data in copy_to:
167
+ chunk = bytes(data)
168
+ self._size += len(chunk)
169
+ yield chunk
170
+
141
171
  try:
142
172
  self.copy_buffer.query = query
143
173
  self.copy_buffer.table_name = table_name
174
+ metadata = self.copy_buffer.metadata
144
175
  pgpack = PGPackWriter(
145
176
  fileobj,
146
- self.copy_buffer.metadata,
177
+ metadata,
147
178
  self.compression_method,
148
179
  )
180
+ columns = make_columns(*metadata_reader(metadata))
181
+ source = DBMetadata(
182
+ name=self.dbname,
183
+ version=self.version,
184
+ columns=columns,
185
+ )
186
+ destination = DBMetadata(
187
+ name="file",
188
+ version=fileobj.name,
189
+ columns=columns,
190
+ )
191
+ self.logger.info(transfer_diagram(source, destination))
149
192
 
150
193
  with self.copy_buffer.copy_to() as copy_to:
151
- pgpack.from_bytes(bytes(data) for data in copy_to)
194
+ pgpack.from_bytes(__read_data(copy_to))
152
195
 
153
- size = pgpack.tell()
154
196
  pgpack.close()
155
- self.logger.info(f"Successfully read {size} bytes.")
197
+ self.logger.info(f"Successfully read {self._size} bytes.")
156
198
  self.logger.info(
157
199
  f"Read pgpack dump from {self.connector.host} done."
158
200
  )
@@ -183,10 +225,14 @@ class PGPackDumper:
183
225
  query_src,
184
226
  table_src,
185
227
  )
228
+ src_dbname = self.dbname
229
+ src_version = self.version
186
230
  elif dumper_src.__class__ is PGPackDumper:
187
231
  source_copy_buffer = dumper_src.copy_buffer
188
232
  source_copy_buffer.table_name = table_src
189
233
  source_copy_buffer.query = query_src
234
+ src_dbname = dumper_src.dbname
235
+ src_version = dumper_src.version
190
236
  else:
191
237
  reader = dumper_src.to_reader(
192
238
  query=query_src,
@@ -196,12 +242,28 @@ class PGPackDumper:
196
242
  self.from_rows(
197
243
  dtype_data=dtype_data,
198
244
  table_name=table_dest,
245
+ source=dumper_src._dbmeta,
199
246
  )
200
247
  size = reader.tell()
201
248
  self.logger.info(f"Successfully sending {size} bytes.")
202
249
  return reader.close()
203
250
 
204
251
  self.copy_buffer.table_name = table_dest
252
+ source = DBMetadata(
253
+ name=src_dbname,
254
+ version=src_version,
255
+ columns=make_columns(
256
+ *metadata_reader(source_copy_buffer.metadata),
257
+ ),
258
+ )
259
+ destination = DBMetadata(
260
+ name=self.dbname,
261
+ version=self.version,
262
+ columns=make_columns(
263
+ *metadata_reader(self.copy_buffer.metadata),
264
+ ),
265
+ )
266
+ self.logger.info(transfer_diagram(source, destination))
205
267
  self.copy_buffer.copy_between(source_copy_buffer)
206
268
  self.connect.commit()
207
269
  return True
@@ -219,8 +281,16 @@ class PGPackDumper:
219
281
 
220
282
  self.copy_buffer.query = query
221
283
  self.copy_buffer.table_name = table_name
284
+ metadata = self.copy_buffer.metadata
285
+ self._dbmeta = DBMetadata(
286
+ name=self.dbname,
287
+ version=self.version,
288
+ columns=make_columns(
289
+ *metadata_reader(metadata),
290
+ ),
291
+ )
222
292
  return StreamReader(
223
- self.copy_buffer.metadata,
293
+ metadata,
224
294
  self.copy_buffer.copy_to(),
225
295
  )
226
296
 
@@ -246,12 +316,27 @@ class PGPackDumper:
246
316
  """Write PGPack dump into PostgreSQL/GreenPlum."""
247
317
 
248
318
  try:
249
- pgpack = PGPackReader(fileobj)
250
319
  self.copy_buffer.table_name = table_name
320
+ pgpack = PGPackReader(fileobj)
321
+ source = DBMetadata(
322
+ name="file",
323
+ version=fileobj.name,
324
+ columns=make_columns(
325
+ pgpack.columns,
326
+ pgpack.pgtypes,
327
+ pgpack.pgparam,
328
+ ),
329
+ )
330
+ destination = DBMetadata(
331
+ name=self.dbname,
332
+ version=self.version,
333
+ columns=make_columns(
334
+ *metadata_reader(self.copy_buffer.metadata),
335
+ ),
336
+ )
337
+ self.logger.info(transfer_diagram(source, destination))
251
338
  self.copy_buffer.copy_from(pgpack.to_bytes())
252
339
  self.connect.commit()
253
- size = pgpack.tell()
254
- self.logger.info(f"Successfully sending {size} bytes.")
255
340
  pgpack.close()
256
341
  self.refresh()
257
342
  except Exception as error:
@@ -290,13 +375,31 @@ class PGPackDumper:
290
375
  self,
291
376
  dtype_data: Iterable[Any],
292
377
  table_name: str,
378
+ source: DBMetadata | None = None,
293
379
  ) -> None:
294
380
  """Write from python iterable object
295
381
  into PostgreSQL/GreenPlum table."""
296
382
 
383
+ if not source:
384
+ source = DBMetadata(
385
+ name="python",
386
+ version="iterable object",
387
+ columns={"Unknown": "Unknown"},
388
+ )
389
+
297
390
  self.copy_buffer.table_name = table_name
298
- _, pgtypes, _ = metadata_reader(self.copy_buffer.metadata)
391
+ columns, pgtypes, pgparam = metadata_reader(self.copy_buffer.metadata)
299
392
  writer = PGCopyWriter(None, pgtypes)
393
+ destination = DBMetadata(
394
+ name=self.dbname,
395
+ version=self.version,
396
+ columns=make_columns(
397
+ list_columns=columns,
398
+ pgtypes=pgtypes,
399
+ pgparam=pgparam,
400
+ ),
401
+ )
402
+ self.logger.info(transfer_diagram(source, destination))
300
403
  self.copy_buffer.copy_from(writer.from_rows(dtype_data))
301
404
  self.connect.commit()
302
405
  self.refresh()
@@ -311,6 +414,14 @@ class PGPackDumper:
311
414
  self.from_rows(
312
415
  dtype_data=iter(data_frame.values),
313
416
  table_name=table_name,
417
+ source=DBMetadata(
418
+ name="pandas",
419
+ version="DataFrame",
420
+ columns=OrderedDict(zip(
421
+ data_frame.columns,
422
+ [str(dtype) for dtype in data_frame.dtypes],
423
+ )),
424
+ )
314
425
  )
315
426
 
316
427
  def from_polars(
@@ -323,6 +434,14 @@ class PGPackDumper:
323
434
  self.from_rows(
324
435
  dtype_data=data_frame.iter_rows(),
325
436
  table_name=table_name,
437
+ source=DBMetadata(
438
+ name="polars",
439
+ version="DataFrame",
440
+ columns=OrderedDict(zip(
441
+ data_frame.columns,
442
+ [str(dtype) for dtype in data_frame.dtypes],
443
+ )),
444
+ )
326
445
  )
327
446
 
328
447
  def refresh(self) -> None:
pgpack_dumper/version.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.2.1.1"
1
+ __version__ = "0.3.0.0"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pgpack_dumper
3
- Version: 0.2.1.1
3
+ Version: 0.3.0.0
4
4
  Summary: Library for read and write PGPack format between PostgreSQL and file.
5
5
  Author-email: 0xMihalich <bayanmobile87@gmail.com>
6
6
  Project-URL: Homepage, https://github.com/0xMihalich/pgpack_dumper
@@ -13,12 +13,13 @@ Classifier: Programming Language :: Python :: 3.10
13
13
  Classifier: Programming Language :: Python :: 3.11
14
14
  Classifier: Programming Language :: Python :: 3.12
15
15
  Classifier: Programming Language :: Python :: 3.13
16
+ Classifier: Programming Language :: Python :: 3.14
16
17
  Requires-Python: >=3.10
17
18
  Description-Content-Type: text/markdown
18
19
  License-File: LICENSE
19
- Requires-Dist: pgpack==0.3.0.8
20
- Requires-Dist: psycopg_binary>=3.2.10
21
- Requires-Dist: psycopg>=3.2.10
20
+ Requires-Dist: pgpack==0.3.0.9
21
+ Requires-Dist: psycopg_binary>=3.2.11
22
+ Requires-Dist: psycopg>=3.2.11
22
23
  Requires-Dist: sqlparse>=0.5.3
23
24
  Dynamic: license-file
24
25
 
@@ -1,26 +1,30 @@
1
- pgpack_dumper-0.2.1.1.dist-info/RECORD,,
2
- pgpack_dumper-0.2.1.1.dist-info/WHEEL,sha256=oqGJCpG61FZJmvyZ3C_0aCv-2mdfcY9e3fXvyUNmWfM,136
3
- pgpack_dumper-0.2.1.1.dist-info/top_level.txt,sha256=AhxLKWTyGtJWNLWUSyBu54xY4wjVS8vSXgW4wwq4bYk,14
4
- pgpack_dumper-0.2.1.1.dist-info/METADATA,sha256=tXrpK7qauZHn1aJjdB83tIHC8qb_2tG95JQTGmvbYaQ,5018
5
- pgpack_dumper-0.2.1.1.dist-info/licenses/LICENSE,sha256=9IYMxtPlcVkrfjeRPA9GRUmaQkLG_FL-Gx7HxLvcLDQ,1067
6
- pgpack_dumper/version.py,sha256=6XpKk8PqQElU3s4yNEnOro0ov1S0j4k_2mEnP9McEB4,24
1
+ pgpack_dumper-0.3.0.0.dist-info/RECORD,,
2
+ pgpack_dumper-0.3.0.0.dist-info/WHEEL,sha256=oqGJCpG61FZJmvyZ3C_0aCv-2mdfcY9e3fXvyUNmWfM,136
3
+ pgpack_dumper-0.3.0.0.dist-info/top_level.txt,sha256=AhxLKWTyGtJWNLWUSyBu54xY4wjVS8vSXgW4wwq4bYk,14
4
+ pgpack_dumper-0.3.0.0.dist-info/METADATA,sha256=m8GpfNmtDaxEgZW-imd67SfDs_LXTlOsL6ZBuvuJe6s,5069
5
+ pgpack_dumper-0.3.0.0.dist-info/licenses/LICENSE,sha256=9IYMxtPlcVkrfjeRPA9GRUmaQkLG_FL-Gx7HxLvcLDQ,1067
6
+ pgpack_dumper/version.py,sha256=QjDEkJ4u3QAQZ3CwLE5X_wJEerb9U3SSlJbnhJdcrxg,24
7
7
  pgpack_dumper/__init__.py,sha256=2x9LRaKPMjMuOoP9pBAeSRZoAeCo9B8fxFbnW_N7kh8,966
8
- pgpack_dumper/dumper.py,sha256=M6cPnDLWT9dJheduNIA6g0WqwS7vzQbWIGYKnB_QijM,10288
8
+ pgpack_dumper/dumper.py,sha256=5WnXg69hszHjzPXRoAMAvFY5ZrVsKVtx48XMsLDG0og,14261
9
9
  pgpack_dumper/common/metadata.py,sha256=Z_vbgi1V85jgPR0h1xbUtt3Ru7vMcmG06bd13Gn_ocY,864
10
+ pgpack_dumper/common/diagram.py,sha256=2w5-s5qwYjCd0cqqlBRbHQ8E90jU7Ay7TNLeROHjCa0,2505
10
11
  pgpack_dumper/common/query.py,sha256=5kMXFJvBWCKW3hYfqsI7F1qc1I786IPNGIhMXypIv1E,1359
11
12
  pgpack_dumper/common/reader.pyx,sha256=vRIR558enMtx4gueWtoujJbpRoWbKnLWx_foVR-rxIw,1821
12
- pgpack_dumper/common/__init__.py,sha256=_SvvDUz9lykOlIk7Y05f3ZpS1nMZAtQJ_7TNT_zD1IE,1008
13
+ pgpack_dumper/common/__init__.py,sha256=gz-xk9TgMHVx5hlFnipO3kuLp7k3grb-OhoSP5Y7LvU,1242
13
14
  pgpack_dumper/common/copy.py,sha256=3mzGGVOVMdnUXqmpdXu3THQJpJB85D9LQN-MC-LX-fg,4941
14
15
  pgpack_dumper/common/logger.py,sha256=gsFCOjnCU3Hvg5UGYMVrPjKRCZKRFtBFBX-7QLZshFo,1675
15
16
  pgpack_dumper/common/stream.py,sha256=PXWcxmDmuK7ueI3MTUfQnmXKWz3A99W-knNRvq_-ZG0,2245
16
- pgpack_dumper/common/reader.cpython-313-darwin.so,sha256=AmP8XuHzqBLyldKpA3Kw4sf1NwRLLFN274p9HIHUwd4,115872
17
+ pgpack_dumper/common/reader.cpython-313-darwin.so,sha256=LSD0XVgbm0VYcr406cC_WQBFjsQo76PnmKId5ZU6Pew,115872
18
+ pgpack_dumper/common/columns.py,sha256=Pa8bzhoi8tXbm3GlgVeHYM3auPORD41q-L29JF_pRqM,693
17
19
  pgpack_dumper/common/structs.py,sha256=gS7vT-_yZ6R45j212B0kYKCJgHalxkdEN84Itctv7Ok,1033
18
20
  pgpack_dumper/common/connector.py,sha256=WGBovyXWxgeseGXploUW0L_4R0GQBtuInl89zqvbKLY,176
19
21
  pgpack_dumper/common/reader.pxd,sha256=flQddQni7UgYU_XPGpnILc7GVDFYjRgq-tbKRTUPbE8,252
20
22
  pgpack_dumper/common/reader.pyi,sha256=UoW-mEAw_G7TLBbIoPTJwpdtucv5IygHdkHc_ZcVM5Y,675
21
23
  pgpack_dumper/common/errors.py,sha256=1WIe9PUq5-Q6VNWyWAaXWr7xuXS2ixJCUB5sDOi5Xgs,591
22
24
  pgpack_dumper/common/queryes/copy_from.sql,sha256=hEHfzW7P9-q74jtgMMA94kGpiMeyURs0pnFLbmqgdW0,50
25
+ pgpack_dumper/common/queryes/gpversion.sql,sha256=iUwW40pFCVAZhDZ6ah8CIW75_KMBLyvUxMyqOI7vxtA,80
23
26
  pgpack_dumper/common/queryes/relkind.sql,sha256=wpyWaIiuCqQ0YaFPsNAEwwxSxuErNnRmP__-bfZ1vho,66
24
27
  pgpack_dumper/common/queryes/copy_to.sql,sha256=AUSsFl_WFB88UcC45SLd8lD_1La1MIkV2CKX-gCOxEE,49
25
28
  pgpack_dumper/common/queryes/attributes.sql,sha256=TriT025-yo7k9Bba3mKmpEYZiutE6ml4-5pAnGMemSg,394
29
+ pgpack_dumper/common/queryes/dbname.sql,sha256=Z3b6a_566PxA52ywRJw1JDSCUMC_c-FyulvW8mC1C7c,141
26
30
  pgpack_dumper/common/queryes/prepare.sql,sha256=oqRk3GU0gjFzM0nLNsKWeD8GfeThZVKsFiMY1-tdb8g,182