pglib 5.7.0__tar.gz → 5.8.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pglib-5.7.0/pglib.egg-info → pglib-5.8.1}/PKG-INFO +1 -1
- {pglib-5.7.0 → pglib-5.8.1/pglib.egg-info}/PKG-INFO +1 -1
- {pglib-5.7.0 → pglib-5.8.1}/setup.py +1 -1
- {pglib-5.7.0 → pglib-5.8.1}/src/connection.cpp +150 -9
- {pglib-5.7.0 → pglib-5.8.1}/src/pglib.h +1 -1
- {pglib-5.7.0 → pglib-5.8.1}/src/resultset.cpp +4 -4
- {pglib-5.7.0 → pglib-5.8.1}/test/test_sync.py +76 -10
- {pglib-5.7.0 → pglib-5.8.1}/test/testutils.py +5 -1
- {pglib-5.7.0 → pglib-5.8.1}/LICENSE +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/MANIFEST.in +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/README.rst +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/pglib/__init__.py +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/pglib/_version.py +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/pglib/asyncpglib.py +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/pglib.egg-info/SOURCES.txt +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/pglib.egg-info/dependency_links.txt +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/pglib.egg-info/top_level.txt +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/setup.cfg +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/byteswap.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/connection.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/conninfoopt.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/conninfoopt.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/datatypes.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/datatypes.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/debug.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/debug.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/enums.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/enums.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/errors.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/errors.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/getdata.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/getdata.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/juliandate.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/juliandate.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/params.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/params.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/pgarrays.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/pgarrays.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/pglib.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/pgtypes.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/resultset.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/row.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/row.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/runtime.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/runtime.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/type_hstore.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/type_hstore.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/type_json.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/type_json.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/type_ltree.cpp +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/src/type_ltree.h +0 -0
- {pglib-5.7.0 → pglib-5.8.1}/test/test_async.py +0 -0
@@ -149,6 +149,7 @@ static PGresult* internal_execute(PyObject* self, PyObject* args)
         PyErr_SetString(PyExc_TypeError, "The first argument must be a string.");
         return 0;
     }
+    const char* szSQL = PyUnicode_AsUTF8(pSql);
 
     Params params(cParams);
     if (!BindParams(cnxn, params, args))
@@ -156,7 +157,7 @@ static PGresult* internal_execute(PyObject* self, PyObject* args)
 
     PGresult* result;
     Py_BEGIN_ALLOW_THREADS
-    result = PQexecParams(cnxn->pgconn,
+    result = PQexecParams(cnxn->pgconn, szSQL,
                           cParams,
                           params.types,
                           params.values,
@@ -175,6 +176,7 @@ static PGresult* internal_execute(PyObject* self, PyObject* args)
     return result;
 }
 
+
 static const char doc_script[] = "Connection.script(sql) --> None\n\n"
     "Executes a script which can contain multiple statements separated by semicolons.";
 
@@ -203,6 +205,7 @@ static PyObject* Connection_script(PyObject* self, PyObject* args)
     }
 }
 
+
 const char* doc_copy_from =
     "Connection.copy_from(command, source) --> int\n"
     "\n"
@@ -233,7 +236,7 @@ static PyObject* Connection_copy_from(PyObject* self, PyObject* args)
     // an object with a read method (e.g. file).
     const char* buffer = 0;
     Py_ssize_t buffer_size = 0;
-
+    Object read_method;
 
     if (PyUnicode_Check(source))
     {
@@ -245,7 +248,7 @@ static PyObject* Connection_copy_from(PyObject* self, PyObject* args)
     {
         if (!PyObject_HasAttrString(source, "read"))
             return PyErr_Format(Error, "CSV source must be a string or file-like object.");
-        read_method
+        read_method.Attach(PyObject_GetAttrString(source, "read"));
     }
 
     Connection* cnxn = CastConnection(self, REQUIRE_OPEN);
@@ -353,6 +356,140 @@ static PyObject* Connection_copy_from(PyObject* self, PyObject* args)
 }
 
 
+const char* doc_copy_to_csv =
+    "Connection.copy_to_csv(table, dest, header=0, delimiter=',', quote='\"')\n"
+    "\n"
+    "Execute a COPY TO command and return the number of records copied.\n"
+    "\n"
+    "table\n"
+    "  The table to copy from.\n"
+    "\n"
+    "dest\n"
+    "  The file-like object to write to. Strings will be written, not bytes, so\n"
+    "  open in text mode.\n"
+    "\n"
+    "header\n"
+    "  If non-zero, a CSV header will be written.\n";
+
+
+static PyObject* Connection_copy_to_csv(PyObject* self, PyObject* args, PyObject* kwargs)
+{
+    // This is not nearly as efficient as I'd like since newer Python versions no longer give
+    // us access to underlying file objects.  We have to write strings through a write method
+    // since there are io layers involved.
+    //
+    // For maximum performance, we should probably offer an option where we open the file given
+    // a filename.  We can either check the parameter type here or we could make a separate
+    // method with "file" in the name like copy_to_file.
+
+    static const char* kwlist[] = {"table", "dest", "header", "delimiter", "quote", 0};
+
+    PyObject* table;
+    PyObject* dest;
+    int header = 0;
+    char* szDelimiter = 0;
+    char* szQuote = 0;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "UO|pzz", (char**)kwlist, &table, &dest, &header,
+                                     &szDelimiter, &szQuote)) {
+        return 0;
+    }
+
+    Connection* cnxn = CastConnection(self, REQUIRE_OPEN);
+    if (!cnxn)
+        return 0;
+
+    if (!PyObject_HasAttrString(dest, "write"))
+        return PyErr_Format(Error, "CSV destination must be a file-like object.");
+    Object write_method(PyObject_GetAttrString(dest, "write"));
+
+    char header_token[] = "header";
+    if (header == 0) {
+        header_token[0] = 0;
+    }
+
+    const char* pszDelimiter = szDelimiter ? szDelimiter : ",";
+    const char* pszQuote = szQuote ? szQuote : "\"";
+
+    Object sql(PyUnicode_FromFormat("copy %U to stdout with csv %s delimiter '%s' quote '%s'",
+                                    table, header_token, pszDelimiter, pszQuote));
+
+    const char* szSQL = PyUnicode_AsUTF8(sql);
+    ResultHolder result;
+    Py_BEGIN_ALLOW_THREADS
+    result = PQexec(cnxn->pgconn, szSQL);
+    Py_END_ALLOW_THREADS
+
+    if (result == 0)
+        return 0;
+
+    switch (PQresultStatus(result)) {
+    case PGRES_COPY_OUT:
+        // This is what we are expecting.
+        break;
+
+    case PGRES_BAD_RESPONSE:
+    case PGRES_NONFATAL_ERROR:
+    case PGRES_FATAL_ERROR:
+        return SetResultError(result.Detach());
+
+    default:
+        return PyErr_Format(Error, "Result was not PGRES_COPY_IN: %d", (int)PQresultStatus(result));
+    }
+
+
+    for (;;) {
+        int cb = 0;
+        char* buffer;
+        Py_BEGIN_ALLOW_THREADS
+        cb = PQgetCopyData(cnxn->pgconn, &buffer, 0);
+        Py_END_ALLOW_THREADS
+
+        if (cb == -2) {
+            return SetResultError(result.Detach());
+        }
+
+        if (cb == -1) {
+            // The copy is complete.
+            break;
+        }
+
+        // We have a buffer of byte data.  We have the length, but the libpq docs say that the
+        // string is also zero terminated, so we're going to try not calling 'write'.
+
+        int err = PyFile_WriteString(buffer, dest);
+
+        // while (cb > 0) {
+        //     PyObject* res = PyObject_CallObject(write_method)
+        // }
+
+        PQfreemem(buffer);
+        if (err) {
+            return 0;
+        }
+    }
+
+    // After a copy, you have to get another result to know if it was successful.
+
+    ResultHolder final_result;
+    ExecStatusType status = PGRES_COMMAND_OK;
+    Py_BEGIN_ALLOW_THREADS
+    final_result = PQgetResult(cnxn->pgconn);
+    status = PQresultStatus(final_result);
+    Py_END_ALLOW_THREADS
+
+    if (status != PGRES_COMMAND_OK) {
+        // SetResultError will take ownership of `result`.
+        return SetResultError(final_result.Detach());
+    }
+
+    const char* sz = PQcmdTuples(final_result);
+    if (sz == 0 || *sz == 0)
+        Py_RETURN_NONE;
+    return PyLong_FromLong(atoi(sz));
+}
+
+
 const char* doc_copy_from_csv =
     "Connection.copy_from_csv(table, source, header=0) --> int\n"
     "\n"
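A minimal usage sketch of the copy_to_csv method added above, based only on the docstring and keyword list shown in this diff; the connection string, table name, and output path are placeholders, not part of the release:

    # Hypothetical example; assumes a reachable PostgreSQL database and an
    # existing table named "items".
    import pglib

    cnxn = pglib.connect('host=localhost dbname=test')  # placeholder conninfo

    # The destination must be a file-like object opened in text mode, since the
    # method writes strings, not bytes.
    with open('items.csv', 'w', encoding='utf8') as dest:
        count = cnxn.copy_to_csv('items', dest, header=1, delimiter=',', quote='"')

    print('copied', count, 'rows')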
@@ -389,15 +526,16 @@ static PyObject* Connection_copy_from_csv(PyObject* self, PyObject* args, PyObje
 
     const char* pszDelimiter = szDelimiter ? szDelimiter : ",";
     const char* pszQuote = szQuote ? szQuote : "\"";
-
-
+    Object sql(PyUnicode_FromFormat("copy %U from stdin with csv %s delimiter '%s' quote '%s'",
+                                    table, header_token, pszDelimiter, pszQuote));
 
     // If source is a string (Unicode), store the UTF-encoded value in buffer. If a byte
     // object, store directly in buffer. Otherwise, buffer will be zero and `source` must be
     // an object with a read method (e.g. file).
     const char* buffer = 0;
     Py_ssize_t buffer_size = 0;
-
+    Object read_method;
+    // PyObject* read_method = 0;
 
     if (PyUnicode_Check(source))
     {
@@ -409,7 +547,7 @@ static PyObject* Connection_copy_from_csv(PyObject* self, PyObject* args, PyObje
     {
         if (!PyObject_HasAttrString(source, "read"))
             return PyErr_Format(Error, "CSV source must be a string or file-like object.");
-        read_method
+        read_method.Attach(PyObject_GetAttrString(source, "read"));
     }
 
     Connection* cnxn = CastConnection(self, REQUIRE_OPEN);
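For comparison, a sketch of copy_from_csv usage consistent with the source-handling comments above (a Unicode string is sent directly; anything with a read method is streamed). The table name, file name, and connection string are placeholders:

    # Hypothetical example of the existing copy_from_csv method.
    import pglib

    cnxn = pglib.connect('host=localhost dbname=test')  # placeholder conninfo
    cnxn.execute("create table t1(a int, b varchar(20))")

    # The source may be a CSV string...
    cnxn.copy_from_csv('t1', '1,one\n2,two\n')

    # ...or any object with a read() method, such as an open file.
    with open('data.csv', encoding='utf8') as fd:
        count = cnxn.copy_from_csv('t1', fd)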
@@ -998,8 +1136,9 @@ static PyObject* Connection_sendQuery(PyObject* self, PyObject* args)
         return 0;
 
     int sent;
+    const char* szSQL = PyUnicode_AsUTF8(pScript);
     Py_BEGIN_ALLOW_THREADS
-    sent = PQsendQuery(cnxn->pgconn,
+    sent = PQsendQuery(cnxn->pgconn, szSQL);
     Py_END_ALLOW_THREADS
 
     if (!sent)
@@ -1037,6 +1176,7 @@ static PyObject* Connection_sendQueryParams(PyObject* self, PyObject* args)
         PyErr_SetString(PyExc_TypeError, "The first argument must be the SQL string.");
         return 0;
     }
+    const char* szSQL = PyUnicode_AsUTF8(pSql);
 
     Params params(cParams);
     if (!BindParams(cnxn, params, args))
@@ -1047,7 +1187,7 @@ static PyObject* Connection_sendQueryParams(PyObject* self, PyObject* args)
     int needs_flush = -1;
 
     Py_BEGIN_ALLOW_THREADS
-    error = PQsendQueryParams(cnxn->pgconn,
+    error = PQsendQueryParams(cnxn->pgconn, szSQL,
                               cParams,
                               params.types,
                               params.values,
@@ -1459,6 +1599,7 @@ static struct PyMethodDef Connection_methods[] =
     { "script", Connection_script, METH_VARARGS, doc_script },
     { "copy_from", (PyCFunction) Connection_copy_from, METH_VARARGS | METH_KEYWORDS, doc_copy_from },
     { "copy_from_csv", (PyCFunction) Connection_copy_from_csv, METH_VARARGS | METH_KEYWORDS, doc_copy_from_csv },
+    { "copy_to_csv", (PyCFunction) Connection_copy_to_csv, METH_VARARGS | METH_KEYWORDS, doc_copy_to_csv},
     { "begin", Connection_begin, METH_NOARGS, doc_begin },
     { "commit", Connection_commit, METH_NOARGS, doc_commit },
     { "rollback", Connection_rollback, METH_NOARGS, doc_rollback },
@@ -135,7 +135,7 @@ struct Object
 {
     PyObject* p;
 
-    // Borrows the reference (takes ownership without adding a new
+    // Borrows the reference (takes ownership without adding a new reference).
     Object(PyObject* _p = 0) { p = _p; }
 
     // If it still has the pointer, it dereferences it.
@@ -62,22 +62,22 @@ static PyObject* AllocateInfos(PGresult* result)
         PyObject* val = PyUnicode_DecodeUTF8(szName, strlen(szName), 0);
         if (val == 0)
             return 0;
-        PyStructSequence_SET_ITEM(
+        PyStructSequence_SET_ITEM(info.Get(), iItem++, val);
 
         val = PyLong_FromLong(PQftype(result, i));
         if (!val)
             return 0;
-        PyStructSequence_SET_ITEM(
+        PyStructSequence_SET_ITEM(info.Get(), iItem++, val);
 
         val = PyLong_FromLong(PQfmod(result, i));
         if (!val)
             return 0;
-        PyStructSequence_SET_ITEM(
+        PyStructSequence_SET_ITEM(info.Get(), iItem++, val);
 
         val = PyLong_FromLong(PQfsize(result, i));
         if (!val)
             return 0;
-        PyStructSequence_SET_ITEM(
+        PyStructSequence_SET_ITEM(info.Get(), iItem++, val);
 
         infos.BorrowItem(i, info.Detach());
     }
@@ -1,12 +1,14 @@
+
 # Run with pytest.
 
 # pylint: disable=missing-function-docstring,redefined-outer-name,unidiomatic-typecheck
 
-import sys, threading, gzip, uuid, locale
+import sys, threading, gzip, uuid, locale, tempfile, csv
 from time import sleep
 from os.path import join, dirname, exists
 from decimal import Decimal
 from datetime import date, time, datetime, timedelta
+from pathlib import Path
 import pytest
 
 from .import testutils
@@ -30,7 +32,7 @@ def cnxn():
     cnxn = pglib.connect(CONNINFO)
     for i in range(3):
         try:
-            cnxn.execute("drop table t
+            cnxn.execute(f"drop table t{i}")
         except: # noqa
             pass
     yield cnxn
@@ -42,9 +44,9 @@ def _test_strtype(cnxn, sqltype, value, resulttype=None, colsize=None):
     assert colsize is None or (value is None or colsize >= len(value))
 
     if colsize:
-        sql = "create table t1(s
+        sql = f"create table t1(s {sqltype}({colsize}))"
     else:
-        sql = "create table t1(s
+        sql = f"create table t1(s {sqltype})"
 
     if resulttype is None:
         resulttype = type(value)
@@ -146,7 +148,37 @@ def test_script(cnxn):
 
 
 #
-# copy
+# copy to
+#
+
+def test_copytocsv(cnxn):
+    # Write a table to a CSV file.
+    #
+    # It isn't as efficient as I'd like since we can no longer get ahold of the file
+    # descriptor.  I think we'll want to make this accept a string filename for a more
+    # efficient write.  For now, make sure the output file is opened in text mode.
+
+    cnxn.execute("create table t1(a text, b text)")
+    cnxn.execute("insert into t1 values ('1', 'one'), ('2', 'two'), ('3', 'three')")
+
+    with tempfile.NamedTemporaryFile(mode='w', encoding='utf8') as tf:
+        print(tf.name)
+        count = cnxn.copy_to_csv('t1', tf, header=1)
+        assert count == 3
+
+        tf.seek(0)
+        with open(tf.name, mode='r', encoding='utf8') as fd:
+            reader = csv.reader(fd)
+            rows = []
+            for row in reader:
+                print('ROW=', row)
+                rows.append(row)
+
+        assert rows == [['a', 'b'], ['1', 'one'], ['2', 'two'], ['3', 'three']]
+
+
+#
+# copy from
 #
 
 def _datapath(filename):
@@ -157,7 +189,8 @@ def _datapath(filename):
 
 def test_copyfromcsv_csv(cnxn):
     cnxn.execute("create table t1(a int, b varchar(20))")
-
+    with open(_datapath('test-noheader.csv')) as fd:
+        count = cnxn.copy_from_csv("t1", fd)
     assert count == 2
     assert cnxn.fetchval("select count(*) from t1") == 2
     row = cnxn.fetchrow("select a,b from t1 where a=2")
@@ -167,7 +200,8 @@ def test_copyfromcsv_csv(cnxn):
 
 def test_copyfromcsv_csv_header(cnxn):
     cnxn.execute("create table t1(a int, b varchar(20))")
-
+    with open(_datapath('test-header.csv')) as fd:
+        count = cnxn.copy_from_csv("t1", fd, header=True)
     assert count == 2
     assert cnxn.fetchval("select count(*) from t1") == 2
     row = cnxn.fetchrow("select a,b from t1 where a=2")
@@ -182,13 +216,15 @@ def test_copyfromcsv_csv_error(cnxn):
     # We'll make the second column too small.
     cnxn.execute("create table t1(a int, b varchar(1) not null)")
     with pytest.raises(pglib.Error):
-
+        with open(_datapath('test-noheader.csv')) as fd:
+            cnxn.copy_from_csv("t1", fd)
 
 
 def test_copyfromcsv_csv_gzip(cnxn):
     # I don't remember why this test is here. We're feeding it unzipped data.
     cnxn.execute("create table t1(a int, b varchar(20))")
-
+    with gzip.open(_datapath('test-header.csv.gz')) as fd:
+        cnxn.copy_from_csv("t1", fd, header=True)
     assert cnxn.fetchval("select count(*) from t1") == 2
     row = cnxn.fetchrow("select a,b from t1 where a=2")
     assert row.a == 2
@@ -585,6 +621,17 @@ def test_timestamptz(cnxn):
     assert result == value
 
 
+# def test_timezone(cnxn):
+#     """
+#     Ensure we can pass a datetime that has UTC already.
+#     """
+#     cnxn.execute("create table t1(a timestamptz)")
+#     value = datetime(2023, 11, 30, 21, 0, tzinfo=timezone.utc)
+#     cnxn.execute("insert into t1 values ($1)", value)
+#     result = cnxn.fetchval("select a from t1")
+#     assert result == value
+
+
 def test_timezone_toutc(cnxn):
     """
     Ensure we can pass a datetime as a parameter and convert it to UTC.
@@ -1050,7 +1097,7 @@ def test_closed_error(cnxn):
     cnxn.rollback()
 
 
-def
+def test_connection_count():
     before = pglib.connection_count()
     cnxn = pglib.connect(CONNINFO)
     assert pglib.connection_count() == (before + 1)
@@ -1060,6 +1107,18 @@ def test_count():
     assert pglib.connection_count() == before
 
 
+def test_count(cnxn):
+    "Ensure delete statements return affected row count."
+    cnxn.execute(
+        """
+        select generate_series(1, 3) as id
+        into t1
+        """)
+
+    count = cnxn.execute("delete from t1 where id in (1, 2, 3)")
+    assert count == 3
+
+
 def test_hstore(cnxn):
     cnxn.execute("create extension if not exists hstore")
     row = cnxn.fetchrow("select oid, typname from pg_type where typname='hstore'")
@@ -1191,3 +1250,10 @@ def test_ltree(cnxn):
     result = cnxn.fetchval("select path from t1")
     assert result == 'a.b.c'
 
+
+def test_large_insert(cnxn):
+    cnxn.execute("drop table if exists large_data")
+    cnxn.execute("create table large_data(code text, description text)")
+    query = (Path(__file__).parent / 'large-insert.sql').read_text(encoding='utf8')
+    print('-' * 40)
+    cnxn.execute(query)
@@ -25,7 +25,11 @@ def add_to_path():
 
     # Only go into directories that match the current Python's version number.
 
-    dir_suffix =
+    dir_suffix = (
+        '-%s.%s' % (sys.version_info[0], sys.version_info[1]),
+        '-%s%s' % (sys.version_info[0], sys.version_info[1]),
+        '-%s%s-pydebug' % (sys.version_info[0], sys.version_info[1])
+    )
 
     build = join(dirname(dirname(abspath(__file__))), 'build')
 
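The dir_suffix value above becomes a tuple of per-version suffixes. A hypothetical sketch of how such a tuple could be used to select matching build directories; the actual matching logic inside add_to_path is not part of this diff:

    # Hypothetical example; assumes a setuptools-style build/ directory exists
    # next to the test directory.
    import sys
    from os import listdir
    from os.path import abspath, dirname, isdir, join

    dir_suffix = (
        '-%s.%s' % (sys.version_info[0], sys.version_info[1]),
        '-%s%s' % (sys.version_info[0], sys.version_info[1]),
        '-%s%s-pydebug' % (sys.version_info[0], sys.version_info[1]),
    )

    build = join(dirname(dirname(abspath(__file__))), 'build')
    if isdir(build):
        # str.endswith accepts a tuple, so any of the suffixes matches.
        candidates = [
            join(build, name)
            for name in listdir(build)
            if isdir(join(build, name)) and name.endswith(dir_suffix)
        ]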