pglib 5.7.1__tar.gz → 5.8.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pglib-5.7.1/pglib.egg-info → pglib-5.8.2}/PKG-INFO +1 -1
- {pglib-5.7.1 → pglib-5.8.2}/pglib/__init__.py +0 -4
- {pglib-5.7.1 → pglib-5.8.2/pglib.egg-info}/PKG-INFO +1 -1
- {pglib-5.7.1 → pglib-5.8.2}/pglib.egg-info/SOURCES.txt +0 -1
- {pglib-5.7.1 → pglib-5.8.2}/setup.py +1 -1
- {pglib-5.7.1 → pglib-5.8.2}/src/connection.cpp +168 -7
- {pglib-5.7.1 → pglib-5.8.2}/src/params.cpp +1 -1
- {pglib-5.7.1 → pglib-5.8.2}/src/pglib.h +1 -1
- {pglib-5.7.1 → pglib-5.8.2}/test/test_sync.py +57 -11
- pglib-5.7.1/pglib/_version.py +0 -484
- {pglib-5.7.1 → pglib-5.8.2}/LICENSE +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/MANIFEST.in +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/README.rst +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/pglib/asyncpglib.py +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/pglib.egg-info/dependency_links.txt +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/pglib.egg-info/top_level.txt +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/setup.cfg +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/byteswap.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/connection.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/conninfoopt.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/conninfoopt.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/datatypes.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/datatypes.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/debug.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/debug.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/enums.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/enums.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/errors.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/errors.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/getdata.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/getdata.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/juliandate.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/juliandate.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/params.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/pgarrays.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/pgarrays.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/pglib.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/pgtypes.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/resultset.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/resultset.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/row.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/row.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/runtime.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/runtime.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/type_hstore.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/type_hstore.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/type_json.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/type_json.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/type_ltree.cpp +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/src/type_ltree.h +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/test/test_async.py +0 -0
- {pglib-5.7.1 → pglib-5.8.2}/test/testutils.py +0 -0
src/connection.cpp

@@ -150,6 +150,8 @@ static PGresult* internal_execute(PyObject* self, PyObject* args)
         return 0;
     }
     const char* szSQL = PyUnicode_AsUTF8(pSql);
+    if (!szSQL)
+        return 0;
 
     Params params(cParams);
     if (!BindParams(cnxn, params, args))

@@ -176,6 +178,7 @@ static PGresult* internal_execute(PyObject* self, PyObject* args)
     return result;
 }
 
+
 static const char doc_script[] = "Connection.script(sql) --> None\n\n"
     "Executes a script which can contain multiple statements separated by semicolons.";
 

@@ -189,7 +192,11 @@ static PyObject* Connection_script(PyObject* self, PyObject* args)
     if (!PyArg_ParseTuple(args, "U", &pScript))
         return 0;
 
-
+    const char* szScript = PyUnicode_AsUTF8(pScript);
+    if (!szScript)
+        return 0;
+
+    ResultHolder result = PQexec(cnxn->pgconn, szScript);
     if (result == 0)
         return 0;
 

@@ -204,6 +211,7 @@ static PyObject* Connection_script(PyObject* self, PyObject* args)
         }
     }
 
+
 const char* doc_copy_from =
     "Connection.copy_from(command, source) --> int\n"
     "\n"

@@ -234,7 +242,7 @@ static PyObject* Connection_copy_from(PyObject* self, PyObject* args)
     // an object with a read method (e.g. file).
     const char* buffer = 0;
     Py_ssize_t buffer_size = 0;
-
+    Object read_method;
 
     if (PyUnicode_Check(source))
     {

@@ -246,7 +254,7 @@ static PyObject* Connection_copy_from(PyObject* self, PyObject* args)
     {
         if (!PyObject_HasAttrString(source, "read"))
            return PyErr_Format(Error, "CSV source must be a string or file-like object.");
-        read_method
+        read_method.Attach(PyObject_GetAttrString(source, "read"));
     }
 
     Connection* cnxn = CastConnection(self, REQUIRE_OPEN);

@@ -254,6 +262,9 @@ static PyObject* Connection_copy_from(PyObject* self, PyObject* args)
         return 0;
 
     const char* szSQL = PyUnicode_AsUTF8(command);
+    if (!szSQL)
+        return 0;
+
     ResultHolder result;
     Py_BEGIN_ALLOW_THREADS
     result = PQexec(cnxn->pgconn, szSQL);

@@ -354,6 +365,145 @@ static PyObject* Connection_copy_from(PyObject* self, PyObject* args)
 }
 
 
+const char* doc_copy_to_csv =
+    "Connection.copy_to_csv(table, dest, header=0, delimiter=',', quote='\"')\n"
+    "\n"
+    "Execute a COPY TO command and return the number of records copied.\n"
+    "\n"
+    "table\n"
+    " The table to copy from.\n"
+    "\n"
+    "dest\n"
+    " The file-like object to write to. Strings will be written, not bytes, so\n"
+    " open in text mode.\n"
+    "\n"
+    "header\n"
+    " If non-zero, a CSV header will be written.\n";
+
+
+static PyObject* Connection_copy_to_csv(PyObject* self, PyObject* args, PyObject* kwargs)
+{
+    // This is not nearly as efficient as I'd like since newer Python versions no longer give
+    // us access to underlying file objects.  We have to write strings through a write method
+    // since there are io layers involved.
+    //
+    // For maximum performance, we should probably offer an option where we open the file given
+    // a filename.  We can either check the parameter type here or we could make a separate
+    // method with "file" in the name like copy_to_file.
+
+    static const char* kwlist[] = {"table", "dest", "header", "delimiter", "quote", 0};
+
+    PyObject* table;
+    PyObject* dest;
+    int header = 0;
+    char* szDelimiter = 0;
+    char* szQuote = 0;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "UO|pzz", (char**)kwlist, &table, &dest, &header,
+                                     &szDelimiter, &szQuote)) {
+        return 0;
+    }
+
+    Connection* cnxn = CastConnection(self, REQUIRE_OPEN);
+    if (!cnxn)
+        return 0;
+
+    if (!PyObject_HasAttrString(dest, "write"))
+        return PyErr_Format(Error, "CSV destination must be a file-like object.");
+    Object write_method(PyObject_GetAttrString(dest, "write"));
+
+    char header_token[] = "header";
+    if (header == 0) {
+        header_token[0] = 0;
+    }
+
+    const char* pszDelimiter = szDelimiter ? szDelimiter : ",";
+    const char* pszQuote = szQuote ? szQuote : "\"";
+
+    Object sql(PyUnicode_FromFormat("copy %U to stdout with csv %s delimiter '%s' quote '%s'",
+                                    table, header_token, pszDelimiter, pszQuote));
+    if (!sql)
+        return 0;
+
+    const char* szSQL = PyUnicode_AsUTF8(sql);
+    if (!szSQL)
+        return 0;
+
+    ResultHolder result;
+    Py_BEGIN_ALLOW_THREADS
+    result = PQexec(cnxn->pgconn, szSQL);
+    Py_END_ALLOW_THREADS
+
+    if (result == 0)
+        return 0;
+
+    switch (PQresultStatus(result)) {
+    case PGRES_COPY_OUT:
+        // This is what we are expecting.
+        break;
+
+    case PGRES_BAD_RESPONSE:
+    case PGRES_NONFATAL_ERROR:
+    case PGRES_FATAL_ERROR:
+        return SetResultError(result.Detach());
+
+    default:
+        return PyErr_Format(Error, "Result was not PGRES_COPY_IN: %d", (int)PQresultStatus(result));
+    }
+
+
+    for (;;) {
+        int cb = 0;
+        char* buffer;
+        Py_BEGIN_ALLOW_THREADS
+        cb = PQgetCopyData(cnxn->pgconn, &buffer, 0);
+        Py_END_ALLOW_THREADS
+
+        if (cb == -2) {
+            return SetResultError(result.Detach());
+        }
+
+        if (cb == -1) {
+            // The copy is complete.
+            break;
+        }
+
+        // We have a buffer of byte data.  We have the length, but the libpq docs say that the
+        // string is also zero terminated, so we're going to try not calling 'write'.
+
+        int err = PyFile_WriteString(buffer, dest);
+
+        // while (cb > 0) {
+        //     PyObject* res = PyObject_CallObject(write_method)
+        // }
+
+        PQfreemem(buffer);
+        if (err) {
+            return 0;
+        }
+    }
+
+    // After a copy, you have to get another result to know if it was successful.
+
+    ResultHolder final_result;
+    ExecStatusType status = PGRES_COMMAND_OK;
+    Py_BEGIN_ALLOW_THREADS
+    final_result = PQgetResult(cnxn->pgconn);
+    status = PQresultStatus(final_result);
+    Py_END_ALLOW_THREADS
+
+    if (status != PGRES_COMMAND_OK) {
+        // SetResultError will take ownership of `result`.
+        return SetResultError(final_result.Detach());
+    }
+
+    const char* sz = PQcmdTuples(final_result);
+    if (sz == 0 || *sz == 0)
+        Py_RETURN_NONE;
+    return PyLong_FromLong(atoi(sz));
+}
+
+
 const char* doc_copy_from_csv =
     "Connection.copy_from_csv(table, source, header=0) --> int\n"
     "\n"

@@ -390,15 +540,16 @@ static PyObject* Connection_copy_from_csv(PyObject* self, PyObject* args, PyObje
 
     const char* pszDelimiter = szDelimiter ? szDelimiter : ",";
     const char* pszQuote = szQuote ? szQuote : "\"";
-
-
+    Object sql(PyUnicode_FromFormat("copy %U from stdin with csv %s delimiter '%s' quote '%s'",
+                                    table, header_token, pszDelimiter, pszQuote));
 
     // If source is a string (Unicode), store the UTF-encoded value in buffer.  If a byte
     // object, store directly in buffer.  Otherwise, buffer will be zero and `source` must be
     // an object with a read method (e.g. file).
     const char* buffer = 0;
     Py_ssize_t buffer_size = 0;
-
+    Object read_method;
+    // PyObject* read_method = 0;
 
     if (PyUnicode_Check(source))
     {

@@ -410,7 +561,7 @@ static PyObject* Connection_copy_from_csv(PyObject* self, PyObject* args, PyObje
     {
         if (!PyObject_HasAttrString(source, "read"))
            return PyErr_Format(Error, "CSV source must be a string or file-like object.");
-        read_method
+        read_method.Attach(PyObject_GetAttrString(source, "read"));
     }
 
     Connection* cnxn = CastConnection(self, REQUIRE_OPEN);

@@ -418,6 +569,9 @@ static PyObject* Connection_copy_from_csv(PyObject* self, PyObject* args, PyObje
         return 0;
 
     const char* szSQL = PyUnicode_AsUTF8(sql);
+    if (!szSQL)
+        return 0;
+
     ResultHolder result;
     Py_BEGIN_ALLOW_THREADS
     result = PQexec(cnxn->pgconn, szSQL);

@@ -1000,6 +1154,9 @@ static PyObject* Connection_sendQuery(PyObject* self, PyObject* args)
 
     int sent;
     const char* szSQL = PyUnicode_AsUTF8(pScript);
+    if (!szSQL)
+        return 0;
+
     Py_BEGIN_ALLOW_THREADS
     sent = PQsendQuery(cnxn->pgconn, szSQL);
     Py_END_ALLOW_THREADS

@@ -1039,7 +1196,10 @@ static PyObject* Connection_sendQueryParams(PyObject* self, PyObject* args)
         PyErr_SetString(PyExc_TypeError, "The first argument must be the SQL string.");
         return 0;
     }
+
     const char* szSQL = PyUnicode_AsUTF8(pSql);
+    if (!szSQL)
+        return 0;
 
     Params params(cParams);
     if (!BindParams(cnxn, params, args))

@@ -1462,6 +1622,7 @@ static struct PyMethodDef Connection_methods[] =
     { "script", Connection_script, METH_VARARGS, doc_script },
     { "copy_from", (PyCFunction) Connection_copy_from, METH_VARARGS | METH_KEYWORDS, doc_copy_from },
     { "copy_from_csv", (PyCFunction) Connection_copy_from_csv, METH_VARARGS | METH_KEYWORDS, doc_copy_from_csv },
+    { "copy_to_csv", (PyCFunction) Connection_copy_to_csv, METH_VARARGS | METH_KEYWORDS, doc_copy_to_csv},
     { "begin", Connection_begin, METH_NOARGS, doc_begin },
     { "commit", Connection_commit, METH_NOARGS, doc_commit },
     { "rollback", Connection_rollback, METH_NOARGS, doc_rollback },
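The new copy_to_csv method registered in the Connection_methods table above issues a COPY ... TO STDOUT WITH CSV and pushes each data buffer to the destination object's write method via PyFile_WriteString. A minimal usage sketch based on the docstring added in this diff; the connection string and table are placeholders, and io.StringIO stands in for any text-mode file-like object:

    import io
    import pglib

    cnxn = pglib.connect("host=localhost dbname=test")  # placeholder conninfo
    cnxn.execute("create table t1(a text, b text)")
    cnxn.execute("insert into t1 values ('1', 'one'), ('2', 'two')")

    buf = io.StringIO()   # dest must expose write(); strings are written, so use text mode
    count = cnxn.copy_to_csv("t1", buf, header=1)
    print(count)          # number of records copied (2 here)
    print(buf.getvalue()) # CSV text, with a header row because header=1

The test_copytocsv test added in test/test_sync.py below exercises the same call against a NamedTemporaryFile.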
src/pglib.h

@@ -135,7 +135,7 @@ struct Object
 {
     PyObject* p;
 
-    // Borrows the reference (takes ownership without adding a new
+    // Borrows the reference (takes ownership without adding a new reference).
     Object(PyObject* _p = 0) { p = _p; }
 
     // If it still has the pointer, it dereferences it.
test/test_sync.py

@@ -3,11 +3,11 @@
 
 # pylint: disable=missing-function-docstring,redefined-outer-name,unidiomatic-typecheck
 
-import sys, threading, gzip, uuid, locale
+import sys, threading, gzip, uuid, locale, tempfile, csv
 from time import sleep
 from os.path import join, dirname, exists
 from decimal import Decimal
-from datetime import date, time, datetime, timedelta
+from datetime import date, time, datetime, timedelta
 from pathlib import Path
 import pytest
 

@@ -32,7 +32,7 @@ def cnxn():
     cnxn = pglib.connect(CONNINFO)
     for i in range(3):
         try:
-            cnxn.execute("drop table t
+            cnxn.execute(f"drop table t{i}")
         except: # noqa
             pass
     yield cnxn

@@ -44,9 +44,9 @@ def _test_strtype(cnxn, sqltype, value, resulttype=None, colsize=None):
     assert colsize is None or (value is None or colsize >= len(value))
 
     if colsize:
-        sql = "create table t1(s
+        sql = f"create table t1(s {sqltype}({colsize}))"
     else:
-        sql = "create table t1(s
+        sql = f"create table t1(s {sqltype})"
 
     if resulttype is None:
         resulttype = type(value)

@@ -148,7 +148,37 @@ def test_script(cnxn):
 
 
 #
-# copy
+# copy to
+#
+
+def test_copytocsv(cnxn):
+    # Write a table to a CSV file.
+    #
+    # It isn't as efficient as I'd like since we can no longer get ahold of the file
+    # descriptor. I think we'll want to make this accept a string filename for a more
+    # efficient write. For now, make sure the output file is opened in text mode.
+
+    cnxn.execute("create table t1(a text, b text)")
+    cnxn.execute("insert into t1 values ('1', 'one'), ('2', 'two'), ('3', 'three')")
+
+    with tempfile.NamedTemporaryFile(mode='w', encoding='utf8') as tf:
+        print(tf.name)
+        count = cnxn.copy_to_csv('t1', tf, header=1)
+        assert count == 3
+
+        tf.seek(0)
+        with open(tf.name, mode='r', encoding='utf8') as fd:
+            reader = csv.reader(fd)
+            rows = []
+            for row in reader:
+                print('ROW=', row)
+                rows.append(row)
+
+        assert rows == [['a', 'b'], ['1', 'one'], ['2', 'two'], ['3', 'three']]
+
+
+#
+# copy from
 #
 
 def _datapath(filename):

@@ -159,7 +189,8 @@ def _datapath(filename):
 
 def test_copyfromcsv_csv(cnxn):
     cnxn.execute("create table t1(a int, b varchar(20))")
-
+    with open(_datapath('test-noheader.csv')) as fd:
+        count = cnxn.copy_from_csv("t1", fd)
     assert count == 2
     assert cnxn.fetchval("select count(*) from t1") == 2
     row = cnxn.fetchrow("select a,b from t1 where a=2")

@@ -169,7 +200,8 @@ def test_copyfromcsv_csv(cnxn):
 
 def test_copyfromcsv_csv_header(cnxn):
     cnxn.execute("create table t1(a int, b varchar(20))")
-
+    with open(_datapath('test-header.csv')) as fd:
+        count = cnxn.copy_from_csv("t1", fd, header=True)
     assert count == 2
     assert cnxn.fetchval("select count(*) from t1") == 2
     row = cnxn.fetchrow("select a,b from t1 where a=2")

@@ -184,13 +216,15 @@ def test_copyfromcsv_csv_error(cnxn):
     # We'll make the second column too small.
     cnxn.execute("create table t1(a int, b varchar(1) not null)")
     with pytest.raises(pglib.Error):
-
+        with open(_datapath('test-noheader.csv')) as fd:
+            cnxn.copy_from_csv("t1", fd)
 
 
 def test_copyfromcsv_csv_gzip(cnxn):
     # I don't remember why this test is here. We're feeding it unzipped data.
     cnxn.execute("create table t1(a int, b varchar(20))")
-
+    with gzip.open(_datapath('test-header.csv.gz')) as fd:
+        cnxn.copy_from_csv("t1", fd, header=True)
     assert cnxn.fetchval("select count(*) from t1") == 2
     row = cnxn.fetchrow("select a,b from t1 where a=2")
     assert row.a == 2

@@ -1063,7 +1097,7 @@ def test_closed_error(cnxn):
         cnxn.rollback()
 
 
-def
+def test_connection_count():
     before = pglib.connection_count()
     cnxn = pglib.connect(CONNINFO)
     assert pglib.connection_count() == (before + 1)

@@ -1073,6 +1107,18 @@ def test_count():
     assert pglib.connection_count() == before
 
 
+def test_count(cnxn):
+    "Ensure delete statements return affected row count."
+    cnxn.execute(
+        """
+        select generate_series(1, 3) as id
+        into t1
+        """)
+
+    count = cnxn.execute("delete from t1 where id in (1, 2, 3)")
+    assert count == 3
+
+
 def test_hstore(cnxn):
     cnxn.execute("create extension if not exists hstore")
     row = cnxn.fetchrow("select oid, typname from pg_type where typname='hstore'")
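The reworked tests above open real file handles before calling copy_from_csv. Per the comment in Connection_copy_from_csv, the source argument can also be a str or bytes object holding the CSV data itself rather than a file-like object with a read method. A small sketch of that variant; the connection string and the inline CSV literal are illustrative only:

    import pglib

    cnxn = pglib.connect("host=localhost dbname=test")  # placeholder conninfo
    cnxn.execute("create table t1(a int, b varchar(20))")

    # Pass the CSV text directly instead of an open file.
    count = cnxn.copy_from_csv("t1", "1,one\n2,two\n")
    assert count == 2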
pglib-5.7.1/pglib/_version.py DELETED

@@ -1,484 +0,0 @@
-
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain. Generated by
-# versioneer-0.16 (https://github.com/warner/python-versioneer)
-
-"""Git implementation of _version.py."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-
-
-def get_keywords():
-    """Get the keywords needed to look up the version information."""
-    # these strings will be replaced by git during git-archive.
-    # setup.py/versioneer.py will grep for the variable names, so they must
-    # each be defined on a line of their own. _version.py will just call
-    # get_keywords().
-    git_refnames = "$Format:%d$"
-    git_full = "$Format:%H$"
-    keywords = {"refnames": git_refnames, "full": git_full}
-    return keywords
-
-
-class VersioneerConfig:
-    """Container for Versioneer configuration parameters."""
-
-
-def get_config():
-    """Create, populate and return the VersioneerConfig() object."""
-    # these strings are filled in when 'setup.py versioneer' creates
-    # _version.py
-    cfg = VersioneerConfig()
-    cfg.VCS = "git"
-    cfg.style = "pep440"
-    cfg.tag_prefix = ""
-    cfg.parentdir_prefix = "None"
-    cfg.versionfile_source = "pglib/_version.py"
-    cfg.verbose = False
-    return cfg
-
-
-class NotThisMethod(Exception):
-    """Exception raised if a method is not valid for the current scenario."""
-
-
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method):  # decorator
-    """Decorator to mark a method as the handler for a particular VCS."""
-    def decorate(f):
-        """Store f in HANDLERS[vcs][method]."""
-        if vcs not in HANDLERS:
-            HANDLERS[vcs] = {}
-        HANDLERS[vcs][method] = f
-        return f
-    return decorate
-
-
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
-    """Call the given command(s)."""
-    assert isinstance(commands, list)
-    p = None
-    for c in commands:
-        try:
-            dispcmd = str([c] + args)
-            # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
-            break
-        except EnvironmentError:
-            e = sys.exc_info()[1]
-            if e.errno == errno.ENOENT:
-                continue
-            if verbose:
-                print("unable to run %s" % dispcmd)
-                print(e)
-            return None
-    else:
-        if verbose:
-            print("unable to find command, tried %s" % (commands,))
-        return None
-    stdout = p.communicate()[0].strip()
-    if sys.version_info[0] >= 3:
-        stdout = stdout.decode()
-    if p.returncode != 0:
-        if verbose:
-            print("unable to run %s (error)" % dispcmd)
-        return None
-    return stdout
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
-    """Try to determine the version from the parent directory name.
-
-    Source tarballs conventionally unpack into a directory that includes
-    both the project name and a version string.
-    """
-    dirname = os.path.basename(root)
-    if not dirname.startswith(parentdir_prefix):
-        if verbose:
-            print("guessing rootdir is '%s', but '%s' doesn't start with "
-                  "prefix '%s'" % (root, dirname, parentdir_prefix))
-        raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-    return {"version": dirname[len(parentdir_prefix):],
-            "full-revisionid": None,
-            "dirty": False, "error": None}
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
-    """Extract version information from the given file."""
-    # the code embedded in _version.py can just fetch the value of these
-    # keywords. When used from setup.py, we don't want to import _version.py,
-    # so we do it with a regexp instead. This function is not used from
-    # _version.py.
-    keywords = {}
-    try:
-        f = open(versionfile_abs, "r")
-        for line in f.readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["full"] = mo.group(1)
-        f.close()
-    except EnvironmentError:
-        pass
-    return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
-    """Get version information from git keywords."""
-    if not keywords:
-        raise NotThisMethod("no keywords at all, weird")
-    refnames = keywords["refnames"].strip()
-    if refnames.startswith("$Format"):
-        if verbose:
-            print("keywords are unexpanded, not using")
-        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
-    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
-    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
-    TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
-    if not tags:
-        # Either we're using git < 1.8.3, or there really are no tags. We use
-        # a heuristic: assume all version tags have a digit. The old git %d
-        # expansion behaves like git log --decorate=short and strips out the
-        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
-        # between branches and tags. By ignoring refnames without digits, we
-        # filter out many common branch names like "release" and
-        # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
-        if verbose:
-            print("discarding '%s', no digits" % ",".join(refs-tags))
-    if verbose:
-        print("likely tags: %s" % ",".join(sorted(tags)))
-    for ref in sorted(tags):
-        # sorting will prefer e.g. "2.0" over "2.0rc1"
-        if ref.startswith(tag_prefix):
-            r = ref[len(tag_prefix):]
-            if verbose:
-                print("picking %s" % r)
-            return {"version": r,
-                    "full-revisionid": keywords["full"].strip(),
-                    "dirty": False, "error": None
-                    }
-    # no suitable tags, so version is "0+unknown", but full hex is still there
-    if verbose:
-        print("no suitable tags, using unknown + full revision id")
-    return {"version": "0+unknown",
-            "full-revisionid": keywords["full"].strip(),
-            "dirty": False, "error": "no suitable tags"}
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
-    """Get version from 'git describe' in the root of the source tree.
-
-    This only gets called if the git-archive 'subst' keywords were *not*
-    expanded, and _version.py hasn't already been rewritten with a short
-    version string, meaning we're inside a checked out source tree.
-    """
-    if not os.path.exists(os.path.join(root, ".git")):
-        if verbose:
-            print("no .git in %s" % root)
-        raise NotThisMethod("no .git directory")
-
-    GITS = ["git"]
-    if sys.platform == "win32":
-        GITS = ["git.cmd", "git.exe"]
-    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
-    # if there isn't one, this yields HEX[-dirty] (no NUM)
-    describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
-                                      "--always", "--long",
-                                      "--match", "%s*" % tag_prefix],
-                               cwd=root)
-    # --long was added in git-1.5.5
-    if describe_out is None:
-        raise NotThisMethod("'git describe' failed")
-    describe_out = describe_out.strip()
-    full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
-    if full_out is None:
-        raise NotThisMethod("'git rev-parse' failed")
-    full_out = full_out.strip()
-
-    pieces = {}
-    pieces["long"] = full_out
-    pieces["short"] = full_out[:7]  # maybe improved later
-    pieces["error"] = None
-
-    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
-    # TAG might have hyphens.
-    git_describe = describe_out
-
-    # look for -dirty suffix
-    dirty = git_describe.endswith("-dirty")
-    pieces["dirty"] = dirty
-    if dirty:
-        git_describe = git_describe[:git_describe.rindex("-dirty")]
-
-    # now we have TAG-NUM-gHEX or HEX
-
-    if "-" in git_describe:
-        # TAG-NUM-gHEX
-        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
-        if not mo:
-            # unparseable. Maybe git-describe is misbehaving?
-            pieces["error"] = ("unable to parse git-describe output: '%s'"
-                               % describe_out)
-            return pieces
-
-        # tag
-        full_tag = mo.group(1)
-        if not full_tag.startswith(tag_prefix):
-            if verbose:
-                fmt = "tag '%s' doesn't start with prefix '%s'"
-                print(fmt % (full_tag, tag_prefix))
-            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
-                               % (full_tag, tag_prefix))
-            return pieces
-        pieces["closest-tag"] = full_tag[len(tag_prefix):]
-
-        # distance: number of commits since tag
-        pieces["distance"] = int(mo.group(2))
-
-        # commit: short hex revision ID
-        pieces["short"] = mo.group(3)
-
-    else:
-        # HEX: no tags
-        pieces["closest-tag"] = None
-        count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
-                                cwd=root)
-        pieces["distance"] = int(count_out)  # total number of commits
-
-    return pieces
-
-
-def plus_or_dot(pieces):
-    """Return a + if we don't already have one, else return a ."""
-    if "+" in pieces.get("closest-tag", ""):
-        return "."
-    return "+"
-
-
-def render_pep440(pieces):
-    """Build up version string, with post-release "local version identifier".
-
-    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
-    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
-    Exceptions:
-    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += plus_or_dot(pieces)
-            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
-            if pieces["dirty"]:
-                rendered += ".dirty"
-    else:
-        # exception #1
-        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
-                                          pieces["short"])
-        if pieces["dirty"]:
-            rendered += ".dirty"
-    return rendered
-
-
-def render_pep440_pre(pieces):
-    """TAG[.post.devDISTANCE] -- No -dirty.
-
-    Exceptions:
-    1: no tags. 0.post.devDISTANCE
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += ".post.dev%d" % pieces["distance"]
-    else:
-        # exception #1
-        rendered = "0.post.dev%d" % pieces["distance"]
-    return rendered
-
-
-def render_pep440_post(pieces):
-    """TAG[.postDISTANCE[.dev0]+gHEX] .
-
-    The ".dev0" means dirty. Note that .dev0 sorts backwards
-    (a dirty tree will appear "older" than the corresponding clean one),
-    but you shouldn't be releasing software with -dirty anyways.
-
-    Exceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%d" % pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-            rendered += plus_or_dot(pieces)
-            rendered += "g%s" % pieces["short"]
-    else:
-        # exception #1
-        rendered = "0.post%d" % pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-        rendered += "+g%s" % pieces["short"]
-    return rendered
-
-
-def render_pep440_old(pieces):
-    """TAG[.postDISTANCE[.dev0]] .
-
-    The ".dev0" means dirty.
-
-    Eexceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%d" % pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-    else:
-        # exception #1
-        rendered = "0.post%d" % pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-    return rendered
-
-
-def render_git_describe(pieces):
-    """TAG[-DISTANCE-gHEX][-dirty].
-
-    Like 'git describe --tags --dirty --always'.
-
-    Exceptions:
-    1: no tags. HEX[-dirty] (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render_git_describe_long(pieces):
-    """TAG-DISTANCE-gHEX[-dirty].
-
-    Like 'git describe --tags --dirty --always -long'.
-    The distance/hash is unconditional.
-
-    Exceptions:
-    1: no tags. HEX[-dirty] (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render(pieces, style):
-    """Render the given version pieces into the requested style."""
-    if pieces["error"]:
-        return {"version": "unknown",
-                "full-revisionid": pieces.get("long"),
-                "dirty": None,
-                "error": pieces["error"]}
-
-    if not style or style == "default":
-        style = "pep440"  # the default
-
-    if style == "pep440":
-        rendered = render_pep440(pieces)
-    elif style == "pep440-pre":
-        rendered = render_pep440_pre(pieces)
-    elif style == "pep440-post":
-        rendered = render_pep440_post(pieces)
-    elif style == "pep440-old":
-        rendered = render_pep440_old(pieces)
-    elif style == "git-describe":
-        rendered = render_git_describe(pieces)
-    elif style == "git-describe-long":
-        rendered = render_git_describe_long(pieces)
-    else:
-        raise ValueError("unknown style '%s'" % style)
-
-    return {"version": rendered, "full-revisionid": pieces["long"],
-            "dirty": pieces["dirty"], "error": None}
-
-
-def get_versions():
-    """Get version information or return default if unable to do so."""
-    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
-    # __file__, we can work backwards from there to the root. Some
-    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
-    # case we can only use expanded keywords.
-
-    cfg = get_config()
-    verbose = cfg.verbose
-
-    try:
-        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
-                                          verbose)
-    except NotThisMethod:
-        pass
-
-    try:
-        root = os.path.realpath(__file__)
-        # versionfile_source is the relative path from the top of the source
-        # tree (where the .git directory might live) to this file. Invert
-        # this to find the root from __file__.
-        for i in cfg.versionfile_source.split('/'):
-            root = os.path.dirname(root)
-    except NameError:
-        return {"version": "0+unknown", "full-revisionid": None,
-                "dirty": None,
-                "error": "unable to find root of source tree"}
-
-    try:
-        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
-        return render(pieces, cfg.style)
-    except NotThisMethod:
-        pass
-
-    try:
-        if cfg.parentdir_prefix:
-            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
-    except NotThisMethod:
-        pass
-
-    return {"version": "0+unknown", "full-revisionid": None,
-            "dirty": None,
-            "error": "unable to compute version"}