pgpack-dumper 0.2.1.2-cp311-cp311-win_amd64.whl → 0.3.0.0-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pgpack_dumper/common/__init__.py CHANGED
@@ -1,5 +1,13 @@
+ """Common functions and classes."""
+
+ from .columns import make_columns
  from .connector import PGConnector
  from .copy import CopyBuffer
+ from .diagram import (
+ DBMetadata,
+ format_table,
+ transfer_diagram,
+ )
  from .errors import (
  CopyBufferError,
  CopyBufferObjectError,
@@ -29,6 +37,7 @@ __all__ = (
  "CopyBufferObjectError",
  "CopyBufferTableNotDefined",
  "CopyReader",
+ "DBMetadata",
  "DumperLogger",
  "PGConnector",
  "PGObject",
@@ -38,9 +47,12 @@ __all__ = (
  "PGPackDumperWriteError",
  "StreamReader",
  "chunk_query",
+ "format_table",
+ "make_columns",
  "query_path",
  "query_template",
  "random_name",
  "read_metadata",
  "search_object",
+ "transfer_diagram",
  )
pgpack_dumper/common/columns.py ADDED
@@ -0,0 +1,30 @@
+ from collections import OrderedDict
+
+ from pgcopylib import PGOid
+ from pgpack.common import PGParam
+
+
+ def make_columns(
+ list_columns: list[str],
+ pgtypes: list[PGOid],
+ pgparam: list[PGParam],
+ ) -> OrderedDict[str, str]:
+ """Make DBMetadata.columns dictionary."""
+
+ columns = OrderedDict()
+
+ for col_name, pgtype, param in zip(
+ list_columns,
+ pgtypes,
+ pgparam,
+ ):
+ col_type = pgtype.name
+
+ if pgtype is PGOid.bpchar:
+ col_type = f"{col_type}({param.length})"
+ elif pgtype is PGOid.numeric:
+ col_type = f"{col_type}({param.length}, {param.scale})"
+
+ columns[col_name] = col_type
+
+ return columns
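
The new make_columns helper turns the column names, PGOid types, and PGParam modifiers returned by the metadata reader into the ordered name → type mapping that DBMetadata.columns expects. A minimal sketch of a call, assuming PGOid also exposes an int8 member alongside the bpchar and numeric members used above; _Param is a hypothetical stand-in that only mimics the .length and .scale attributes read from pgpack's PGParam:

    from collections import namedtuple

    from pgcopylib import PGOid
    from pgpack_dumper.common import make_columns

    # Hypothetical stand-in for pgpack's PGParam: only .length/.scale are read.
    _Param = namedtuple("_Param", "length scale")

    columns = make_columns(
        list_columns=["id", "name", "price"],
        pgtypes=[PGOid.int8, PGOid.bpchar, PGOid.numeric],
        pgparam=[_Param(0, 0), _Param(32, 0), _Param(10, 2)],
    )
    # Expected: OrderedDict({'id': 'int8', 'name': 'bpchar(32)', 'price': 'numeric(10, 2)'})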
pgpack_dumper/common/diagram.py ADDED
@@ -0,0 +1,78 @@
+ from collections import OrderedDict
+ from typing import NamedTuple
+
+
+ class DBMetadata(NamedTuple):
+ """Database object."""
+
+ name: str
+ version: str
+ columns: OrderedDict
+
+
+ def truncate_text(text: str, max_length: int) -> str:
+ """Truncate text and add ellipsis if too long."""
+
+ if len(text) > max_length:
+ return text[: max_length - 1] + "…"
+ return text
+
+
+ def format_table(
+ metadata: DBMetadata,
+ direction: str,
+ table_width: int = 51,
+ ) -> list[str]:
+ """Format single table as list of lines."""
+
+ lines = []
+
+ title = f"{direction} [{metadata.name} {metadata.version}]"
+ lines.append(f"┌{''.ljust(table_width, '─')}┐")
+ lines.append(
+ f"│ {truncate_text(title, table_width - 1).ljust(table_width - 1)}│"
+ )
+ lines.append(f"╞{'═' * 25}╤{'═' * 25}╡")
+ lines.append(f"│ {'Column Name'.ljust(23)} │ {'Data Type'.ljust(23)} │")
+ lines.append(f"╞{'═' * 25}╪{'═' * 25}╡")
+
+ for i, (col_name, col_type) in enumerate(metadata.columns.items()):
+ truncated_name = truncate_text(col_name, 23)
+ truncated_type = truncate_text(str(col_type), 23)
+ lines.append(
+ f"│ {truncated_name.ljust(23)} │ {truncated_type.ljust(23)} │"
+ )
+ if i < len(metadata.columns) - 1:
+ lines.append(f"├{'─' * 25}┼{'─' * 25}┤")
+
+ lines.append(f"└{'─' * 25}┴{'─' * 25}┘")
+ return lines
+
+
+ def transfer_diagram(source: DBMetadata, destination: DBMetadata) -> str:
+ """Make transfer diagram with two tables and arrow."""
+
+ src_lines = format_table(source, "Source")
+ dest_lines = format_table(destination, "Destination")
+ max_lines = max(len(src_lines), len(dest_lines), 9)
+
+ src_lines.extend([" " * 53] * (max_lines - len(src_lines)))
+ dest_lines.extend([" " * 53] * (max_lines - len(dest_lines)))
+
+ middle_line = max_lines // 2
+ arrow_config = [
+ (middle_line - 3, " │╲ "),
+ (middle_line - 2, " │ ╲ "),
+ (middle_line - 1, "┌┘ ╲ "),
+ (middle_line, "│ ╲"),
+ (middle_line + 1, "│ ╱"),
+ (middle_line + 2, "└┐ ╱ "),
+ (middle_line + 3, " │ ╱ "),
+ (middle_line + 4, " │╱ "),
+ ]
+ arrow_map = {line: arrow for line, arrow in arrow_config}
+
+ return "Transfer data diagram:\n" + "\n".join(
+ f"{src_lines[row]} {arrow_map.get(row, ' ')} {dest_lines[row]}"
+ for row in range(max_lines)
+ )
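
The new diagram module gives the dumper a readable source → destination summary for its logs: DBMetadata names one side of a transfer, format_table renders it as a box-drawing table, and transfer_diagram joins two such tables with an arrow. A minimal sketch using only the module shown above; the names, versions, and column mappings are illustrative values rather than output of make_columns:

    from collections import OrderedDict

    from pgpack_dumper.common import DBMetadata, transfer_diagram

    source = DBMetadata(
        name="postgres",
        version="16.3",
        columns=OrderedDict(id="int8", name="bpchar(32)"),
    )
    destination = DBMetadata(
        name="file",
        version="dump.pgpack",
        columns=OrderedDict(id="int8", name="bpchar(32)"),
    )

    # Prints "Transfer data diagram:" followed by two framed tables joined by an arrow.
    print(transfer_diagram(source, destination))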
pgpack_dumper/dumper.py CHANGED
@@ -1,3 +1,5 @@
+ from collections import OrderedDict
+ from collections.abc import Generator
  from io import (
  BufferedReader,
  BufferedWriter,
@@ -7,7 +9,8 @@ from types import MethodType
  from typing import (
  Any,
  Iterable,
- Union
+ Iterator,
+ Union,
  )

  from pgcopylib import PGCopyWriter
@@ -19,6 +22,7 @@ from pgpack import (
  )
  from psycopg import (
  Connection,
+ Copy,
  Cursor,
  )
  from pandas import DataFrame as PdFrame
@@ -27,6 +31,7 @@ from sqlparse import format as sql_format

  from .common import (
  CopyBuffer,
+ DBMetadata,
  DumperLogger,
  PGConnector,
  PGPackDumperError,
@@ -35,7 +40,9 @@ from .common import (
  PGPackDumperWriteBetweenError,
  StreamReader,
  chunk_query,
+ make_columns,
  query_template,
+ transfer_diagram,
  )

@@ -62,6 +69,8 @@ class PGPackDumper:
  )
  self.cursor: Cursor = self.connect.cursor()
  self.copy_buffer: CopyBuffer = CopyBuffer(self.cursor, self.logger)
+ self._dbmeta: DBMetadata | None = None
+ self._size = 0
  except Exception as error:
  self.logger.error(f"{error.__class__.__name__}: {error}")
  raise PGPackDumperError(error)
@@ -76,7 +85,7 @@
  if self.dbname == "greenplum":
  self.cursor.execute(query_template("gpversion"))
  gpversion = self.cursor.fetchone()[0]
- self.version = f"{self.version}|greenplum {gpversion}"
+ self.version = f"{self.version} gp {gpversion}"

  self.logger.info(
  f"PGPackDumper initialized for host {self.connector.host}"
@@ -93,7 +102,7 @@
  second_part: list[str]

  self: PGPackDumper = args[0]
- cursor: Cursor = kwargs.get("dumper_src", self).cursor
+ cursor: Cursor = (kwargs.get("dumper_src") or self).cursor
  query: str = kwargs.get("query_src") or kwargs.get("query")
  part: int = 1
  first_part, second_part = chunk_query(self.query_formatter(query))
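
The cursor fix in the decorator matters because dict.get("dumper_src", self) only falls back to self when the key is missing entirely; if a caller passes dumper_src=None explicitly, .get returns None and the old expression then failed on .cursor. A plain-Python illustration of the difference:

    kwargs = {"dumper_src": None}

    # Old form: the key exists, so the default is ignored and None comes back.
    assert kwargs.get("dumper_src", "fallback") is None

    # New form: `or` also covers an explicit None (or any other falsy value).
    assert (kwargs.get("dumper_src") or "fallback") == "fallback"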
@@ -147,21 +156,45 @@
  ) -> bool:
  """Internal method read_dump for generate kwargs to decorator."""

+ def __read_data(
+ copy_to: Iterator[Copy],
+ ) -> Generator[bytes, None, None]:
+ """Generate bytes from copy object with calc size."""
+
+ self._size = 0
+
+ for data in copy_to:
+ chunk = bytes(data)
+ self._size += len(chunk)
+ yield chunk
+
  try:
  self.copy_buffer.query = query
  self.copy_buffer.table_name = table_name
+ metadata = self.copy_buffer.metadata
  pgpack = PGPackWriter(
  fileobj,
- self.copy_buffer.metadata,
+ metadata,
  self.compression_method,
  )
+ columns = make_columns(*metadata_reader(metadata))
+ source = DBMetadata(
+ name=self.dbname,
+ version=self.version,
+ columns=columns,
+ )
+ destination = DBMetadata(
+ name="file",
+ version=fileobj.name,
+ columns=columns,
+ )
+ self.logger.info(transfer_diagram(source, destination))

  with self.copy_buffer.copy_to() as copy_to:
- pgpack.from_bytes(bytes(data) for data in copy_to)
+ pgpack.from_bytes(__read_data(copy_to))

- size = pgpack.tell()
  pgpack.close()
- self.logger.info(f"Successfully read {size} bytes.")
+ self.logger.info(f"Successfully read {self._size} bytes.")
  self.logger.info(
  f"Read pgpack dump from {self.connector.host} done."
  )
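
The rewritten read path streams COPY chunks through the nested __read_data generator, so the byte count accumulates in self._size while the data flows into PGPackWriter, replacing the earlier pgpack.tell() call. The same pattern in isolation, as a stand-alone sketch (count_bytes and totals are illustrative names, not part of the package):

    from collections.abc import Generator, Iterator

    def count_bytes(
        chunks: Iterator[bytes],
        totals: dict[str, int],
    ) -> Generator[bytes, None, None]:
        """Yield chunks unchanged while accumulating their total size."""
        totals["size"] = 0
        for chunk in chunks:
            data = bytes(chunk)
            totals["size"] += len(data)
            yield data

    totals: dict[str, int] = {}
    assert b"".join(count_bytes(iter([b"ab", b"cde"]), totals)) == b"abcde"
    assert totals["size"] == 5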
@@ -192,10 +225,14 @@
  query_src,
  table_src,
  )
+ src_dbname = self.dbname
+ src_version = self.version
  elif dumper_src.__class__ is PGPackDumper:
  source_copy_buffer = dumper_src.copy_buffer
  source_copy_buffer.table_name = table_src
  source_copy_buffer.query = query_src
+ src_dbname = dumper_src.dbname
+ src_version = dumper_src.version
  else:
  reader = dumper_src.to_reader(
  query=query_src,
@@ -205,12 +242,28 @@
  self.from_rows(
  dtype_data=dtype_data,
  table_name=table_dest,
+ source=dumper_src._dbmeta,
  )
  size = reader.tell()
  self.logger.info(f"Successfully sending {size} bytes.")
  return reader.close()

  self.copy_buffer.table_name = table_dest
+ source = DBMetadata(
+ name=src_dbname,
+ version=src_version,
+ columns=make_columns(
+ *metadata_reader(source_copy_buffer.metadata),
+ ),
+ )
+ destination = DBMetadata(
+ name=self.dbname,
+ version=self.version,
+ columns=make_columns(
+ *metadata_reader(self.copy_buffer.metadata),
+ ),
+ )
+ self.logger.info(transfer_diagram(source, destination))
  self.copy_buffer.copy_between(source_copy_buffer)
  self.connect.commit()
  return True
@@ -228,8 +281,16 @@

  self.copy_buffer.query = query
  self.copy_buffer.table_name = table_name
+ metadata = self.copy_buffer.metadata
+ self._dbmeta = DBMetadata(
+ name=self.dbname,
+ version=self.version,
+ columns=make_columns(
+ *metadata_reader(metadata),
+ ),
+ )
  return StreamReader(
- self.copy_buffer.metadata,
+ metadata,
  self.copy_buffer.copy_to(),
  )

@@ -255,12 +316,27 @@
  """Write PGPack dump into PostgreSQL/GreenPlum."""

  try:
- pgpack = PGPackReader(fileobj)
  self.copy_buffer.table_name = table_name
+ pgpack = PGPackReader(fileobj)
+ source = DBMetadata(
+ name="file",
+ version=fileobj.name,
+ columns=make_columns(
+ pgpack.columns,
+ pgpack.pgtypes,
+ pgpack.pgparam,
+ ),
+ )
+ destination = DBMetadata(
+ name=self.dbname,
+ version=self.version,
+ columns=make_columns(
+ *metadata_reader(self.copy_buffer.metadata),
+ ),
+ )
+ self.logger.info(transfer_diagram(source, destination))
  self.copy_buffer.copy_from(pgpack.to_bytes())
  self.connect.commit()
- size = pgpack.tell()
- self.logger.info(f"Successfully sending {size} bytes.")
  pgpack.close()
  self.refresh()
  except Exception as error:
@@ -299,13 +375,31 @@
  self,
  dtype_data: Iterable[Any],
  table_name: str,
+ source: DBMetadata | None = None,
  ) -> None:
  """Write from python iterable object
  into PostgreSQL/GreenPlum table."""

+ if not source:
+ source = DBMetadata(
+ name="python",
+ version="iterable object",
+ columns={"Unknown": "Unknown"},
+ )
+
  self.copy_buffer.table_name = table_name
- _, pgtypes, _ = metadata_reader(self.copy_buffer.metadata)
+ columns, pgtypes, pgparam = metadata_reader(self.copy_buffer.metadata)
  writer = PGCopyWriter(None, pgtypes)
+ destination = DBMetadata(
+ name=self.dbname,
+ version=self.version,
+ columns=make_columns(
+ list_columns=columns,
+ pgtypes=pgtypes,
+ pgparam=pgparam,
+ ),
+ )
+ self.logger.info(transfer_diagram(source, destination))
  self.copy_buffer.copy_from(writer.from_rows(dtype_data))
  self.connect.commit()
  self.refresh()
@@ -320,6 +414,14 @@
  self.from_rows(
  dtype_data=iter(data_frame.values),
  table_name=table_name,
+ source=DBMetadata(
+ name="pandas",
+ version="DataFrame",
+ columns=OrderedDict(zip(
+ data_frame.columns,
+ [str(dtype) for dtype in data_frame.dtypes],
+ )),
+ )
  )

  def from_polars(
@@ -332,6 +434,14 @@
  self.from_rows(
  dtype_data=data_frame.iter_rows(),
  table_name=table_name,
+ source=DBMetadata(
+ name="polars",
+ version="DataFrame",
+ columns=OrderedDict(zip(
+ data_frame.columns,
+ [str(dtype) for dtype in data_frame.dtypes],
+ )),
+ )
  )

  def refresh(self) -> None:
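
from_pandas and from_polars now describe their source for the transfer diagram by zipping the frame's column names with stringified dtypes, as the hunks above show. A small sketch of that mapping for pandas, with an illustrative frame:

    from collections import OrderedDict

    import pandas as pd

    data_frame = pd.DataFrame({"id": [1, 2], "name": ["a", "b"]})
    columns = OrderedDict(zip(
        data_frame.columns,
        [str(dtype) for dtype in data_frame.dtypes],
    ))
    # Expected: OrderedDict({'id': 'int64', 'name': 'object'})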
pgpack_dumper/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.2.1.2"
+ __version__ = "0.3.0.0"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pgpack_dumper
- Version: 0.2.1.2
+ Version: 0.3.0.0
  Summary: Library for read and write PGPack format between PostgreSQL and file.
  Author-email: 0xMihalich <bayanmobile87@gmail.com>
  Project-URL: Homepage, https://github.com/0xMihalich/pgpack_dumper
@@ -13,12 +13,13 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
+ Classifier: Programming Language :: Python :: 3.14
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: pgpack==0.3.0.8
- Requires-Dist: psycopg_binary>=3.2.10
- Requires-Dist: psycopg>=3.2.10
+ Requires-Dist: pgpack==0.3.0.9
+ Requires-Dist: psycopg_binary>=3.2.11
+ Requires-Dist: psycopg>=3.2.11
  Requires-Dist: sqlparse>=0.5.3
  Dynamic: license-file

@@ -1,14 +1,16 @@
  pgpack_dumper/__init__.py,sha256=wl4I02A6U4A6H3iZC4xNC1N-Y2f9GpgW8lSdbFAR-eA,1012
- pgpack_dumper/dumper.py,sha256=PP4LXP4niiG2WVZ5tI4KYsunxPPTCKeSePN9sbDWs5s,10979
- pgpack_dumper/version.py,sha256=MF9DSs0YzpCtcaHshVuPVAZarjKFC5EKcBb6pjmtKeE,25
- pgpack_dumper/common/__init__.py,sha256=fOjEYtpkuafk8klKzPrioT1Bbf_Fp89hraHyJe8HmdQ,1054
+ pgpack_dumper/dumper.py,sha256=d5JgkMGJjoK4hXte7AVpFbg-QSyDLMQy1-6uVpMYOWI,14721
+ pgpack_dumper/version.py,sha256=7QLWsdocVb_VhH8H-ER-x1bX4v3i5iRdbn82QyaGJ3Q,25
+ pgpack_dumper/common/__init__.py,sha256=sWiUnLyvYZZJNIZYWl6MmntIUyDm-naM3DVoV3YcNDA,1300
+ pgpack_dumper/common/columns.py,sha256=sV3e0IErvuBZvIsATSQx_hajBp59VTmni5EmSAdyRBY,723
  pgpack_dumper/common/connector.py,sha256=3SIGIDTd9OCdr4k5uiPJxzHMGifvsJAEotzzY1GBZgU,187
  pgpack_dumper/common/copy.py,sha256=9YORQ05GEQgA8y-rcLEYfV5QQQvUnUpFc3wxd-7v5IE,5098
+ pgpack_dumper/common/diagram.py,sha256=IkmRtKF6Jt4z0cMWpAYGCZ4LwhNWQ4qkWYEJd8Xvd9I,2583
  pgpack_dumper/common/errors.py,sha256=m0nLUBkpOrodRTesI9WYlpy_MUJ5GPY1e-TaI7RP2LU,617
  pgpack_dumper/common/logger.py,sha256=PF7L1WYWkj4qU-CDpQTraqSiCPi_AZSmaUt1nscO5gM,1745
  pgpack_dumper/common/metadata.py,sha256=keknqsofg8nUfu_e38siM5CR-bj8hiiLHSRRXpDs_qU,902
  pgpack_dumper/common/query.py,sha256=fB1XLT_vK6LBS92p_qZfQyLZTmoplzGCcG63rgPZ-T8,1421
- pgpack_dumper/common/reader.cp311-win_amd64.pyd,sha256=B28tzkoZ6M5ivf5Hyxz92B9ZG0BstoEEV-l8tp0VNEY,59392
+ pgpack_dumper/common/reader.cp311-win_amd64.pyd,sha256=wHQgjaoHH4ah7X0On22yABT9AKNSCTCQ2FOKIRX5h3c,59392
  pgpack_dumper/common/reader.pxd,sha256=_CVyKYdSScWrDZ7-FeCafFvXSOJDN3KwWIE0dH07Zbo,263
  pgpack_dumper/common/reader.pyi,sha256=z4ZQMiHLUm1p7lDuWrhnQnv9_sv-GVmBa_5_0Ns1UZc,710
  pgpack_dumper/common/reader.pyx,sha256=dxu2igQiJvq3EzPRoTpsirxS0ZlQxqTrJrAoz-8Waso,1888
@@ -21,8 +23,8 @@ pgpack_dumper/common/queryes/dbname.sql,sha256=yJN5CJF3KOrbHdOYmoj7DjFveEojjWsmW
  pgpack_dumper/common/queryes/gpversion.sql,sha256=iUwW40pFCVAZhDZ6ah8CIW75_KMBLyvUxMyqOI7vxtA,80
  pgpack_dumper/common/queryes/prepare.sql,sha256=1pACITTlfQo1F_tEtOGigFCkWOkbxD421B4xJjjG70Y,185
  pgpack_dumper/common/queryes/relkind.sql,sha256=wpyWaIiuCqQ0YaFPsNAEwwxSxuErNnRmP__-bfZ1vho,66
- pgpack_dumper-0.2.1.2.dist-info/licenses/LICENSE,sha256=jW7Zeev0dRSYqbEnAk6qGz3e3yok-NhdTxP0ceNPlWM,1088
- pgpack_dumper-0.2.1.2.dist-info/METADATA,sha256=OVNx6yw5HuS_f_4WyYn5loicKe9vN1E5Brgq98dBmsg,5218
- pgpack_dumper-0.2.1.2.dist-info/WHEEL,sha256=JLOMsP7F5qtkAkINx5UnzbFguf8CqZeraV8o04b0I8I,101
- pgpack_dumper-0.2.1.2.dist-info/top_level.txt,sha256=AhxLKWTyGtJWNLWUSyBu54xY4wjVS8vSXgW4wwq4bYk,14
- pgpack_dumper-0.2.1.2.dist-info/RECORD,,
+ pgpack_dumper-0.3.0.0.dist-info/licenses/LICENSE,sha256=jW7Zeev0dRSYqbEnAk6qGz3e3yok-NhdTxP0ceNPlWM,1088
+ pgpack_dumper-0.3.0.0.dist-info/METADATA,sha256=7XT-EG2TaKdjIt4ae_RAxo-ZJ4bN_B7M8yPjNTjzEcg,5270
+ pgpack_dumper-0.3.0.0.dist-info/WHEEL,sha256=JLOMsP7F5qtkAkINx5UnzbFguf8CqZeraV8o04b0I8I,101
+ pgpack_dumper-0.3.0.0.dist-info/top_level.txt,sha256=AhxLKWTyGtJWNLWUSyBu54xY4wjVS8vSXgW4wwq4bYk,14
+ pgpack_dumper-0.3.0.0.dist-info/RECORD,,