pogi 2.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.vscode/launch.json +35 -0
- package/CHANGELOG.md +277 -0
- package/LICENSE +21 -0
- package/README.md +85 -0
- package/docs/API/PgDb.md +218 -0
- package/docs/API/PgSchema.md +91 -0
- package/docs/API/PgTable.md +365 -0
- package/docs/API/QueryOptions.md +77 -0
- package/docs/API/condition.md +133 -0
- package/docs/connection.md +91 -0
- package/docs/css/docs.css +164 -0
- package/docs/executingSqlFile.md +44 -0
- package/docs/faq.md +15 -0
- package/docs/functions.md +19 -0
- package/docs/generatingInterfaceForTables.md +35 -0
- package/docs/index.md +48 -0
- package/docs/logger.md +40 -0
- package/docs/mappingDatabaseTypes.md +89 -0
- package/docs/notification.md +19 -0
- package/docs/pitfalls.md +73 -0
- package/docs/streams.md +68 -0
- package/docs/transaction.md +65 -0
- package/lib/bin/generateInterface.d.ts +1 -0
- package/lib/bin/generateInterface.js +53 -0
- package/lib/bin/generateInterface.js.map +1 -0
- package/lib/connectionOptions.d.ts +25 -0
- package/lib/connectionOptions.js +3 -0
- package/lib/connectionOptions.js.map +1 -0
- package/lib/index.d.ts +6 -0
- package/lib/index.js +10 -0
- package/lib/index.js.map +1 -0
- package/lib/pgConverters.d.ts +10 -0
- package/lib/pgConverters.js +66 -0
- package/lib/pgConverters.js.map +1 -0
- package/lib/pgDb.d.ts +86 -0
- package/lib/pgDb.js +745 -0
- package/lib/pgDb.js.map +1 -0
- package/lib/pgDbLogger.d.ts +5 -0
- package/lib/pgDbLogger.js +3 -0
- package/lib/pgDbLogger.js.map +1 -0
- package/lib/pgDbOperators.d.ts +113 -0
- package/lib/pgDbOperators.js +44 -0
- package/lib/pgDbOperators.js.map +1 -0
- package/lib/pgSchema.d.ts +16 -0
- package/lib/pgSchema.js +16 -0
- package/lib/pgSchema.js.map +1 -0
- package/lib/pgTable.d.ts +131 -0
- package/lib/pgTable.js +322 -0
- package/lib/pgTable.js.map +1 -0
- package/lib/pgUtils.d.ts +31 -0
- package/lib/pgUtils.js +157 -0
- package/lib/pgUtils.js.map +1 -0
- package/lib/queryAble.d.ts +76 -0
- package/lib/queryAble.js +330 -0
- package/lib/queryAble.js.map +1 -0
- package/lib/queryWhere.d.ts +8 -0
- package/lib/queryWhere.js +249 -0
- package/lib/queryWhere.js.map +1 -0
- package/mkdocs.yml +25 -0
- package/package.json +65 -0
- package/spec/resources/init.sql +122 -0
- package/spec/resources/throw_exception.sql +5 -0
- package/spec/resources/tricky.sql +13 -0
- package/spec/run.js +5 -0
- package/spec/support/jasmine.json +9 -0
- package/src/bin/generateInterface.ts +54 -0
- package/src/connectionOptions.ts +42 -0
- package/src/index.ts +6 -0
- package/src/pgConverters.ts +55 -0
- package/src/pgDb.ts +820 -0
- package/src/pgDbLogger.ts +13 -0
- package/src/pgDbOperators.ts +62 -0
- package/src/pgSchema.ts +15 -0
- package/src/pgTable.ts +401 -0
- package/src/pgUtils.ts +176 -0
- package/src/queryAble.ts +393 -0
- package/src/queryWhere.ts +326 -0
- package/src/test/pgDbOperatorSpec.ts +492 -0
- package/src/test/pgDbSpec.ts +1339 -0
- package/src/test/pgServiceRestartTest.ts +1500 -0
- package/src/tsconfig.json +33 -0
- package/utils_sql/lower.sql +4 -0
package/docs/executingSqlFile.md
ADDED
@@ -0,0 +1,44 @@
## Executing sql files

### Run an sql file for a single schema

```js
let pgdb = await PgDb.connect(..);
await pgdb.execute(__dirname + '/init.sql');
```

### Run an sql file for multiple schemas

```js
imports ..

let pgdb = await PgDb.connect(..);
try {
    for (let schemaName of ['test1', 'test2']) {
        await pgdb.execute(__dirname + '/db_upgrade/all.sql',
            (cmd) => cmd.replace(/__SCHEMA__/g, '"' + schemaName + '"'));
    }
} catch (e) {
    //log
}
```

For example the sql file could be the following (note: `__SCHEMA__` will be replaced with the `schemaName`, see above):
```sql
DO $$
BEGIN
    BEGIN
        ALTER TABLE __SCHEMA__.webapp ADD COLUMN lang char(2) DEFAULT 'JS';
    EXCEPTION
        WHEN duplicate_column THEN RAISE NOTICE 'column <column_name> already exists in <table_name>.';
    END;
END;
$$;

UPDATE __SCHEMA__.webapp SET lang='TS' WHERE lang='JS';
```
package/docs/faq.md
ADDED
@@ -0,0 +1,15 @@
## Does it support prepared statements?
Not at the moment. We could add it, but according to
[this](https://github.com/brianc/node-postgres/wiki/Parameterized-queries-and-Prepared-Statements)
they do not add much value.

## What is pogi?
The **pgdb** name was taken, so we renamed it to **pogi**. Pogi is the nickname of a delicious pastry. Yum!
Search for 'pogacsa'.

## Is there any company behind it?
It was originally developed at [www.labcup.net](http://www.labcup.net/), where we also use it.

## My IDE doesn't show pogi's typing definitions
Typescript should get it from npm, but maybe this helps:
`typings install pogi=github:holdfenytolvaj/pogi/lib/index.d.ts --save`
package/docs/functions.md
ADDED
@@ -0,0 +1,19 @@
# Using stored procedures
When PgDb is loaded, stored procedures/functions are loaded as well, into the `fn` namespace of their schemas. A function like this
```SQL
CREATE OR REPLACE FUNCTION increment(i INT)
RETURNS INT AS $$
BEGIN
    RETURN i + 1;
END;
$$ LANGUAGE plpgsql;
```
can be used as easily as this
```js
var num = await pgdb.fn.increment(4);

//or if it is in a specific schema:

num = await pgdb[schema].fn.increment(4);
```
It detects the return type, and if it is a single value or an array of single values, it resolves the result row(s) for you.
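As a hedged sketch of how a set-returning function might be consumed through the same `fn` namespace (the `list_users` function, the `myschema` schema and the exact shape of the resolved value are assumptions for illustration, not part of the original docs):

```js
/* hypothetical function, for illustration only:
   CREATE FUNCTION myschema.list_users(min_id INT) RETURNS SETOF myschema.users AS $$
       SELECT * FROM myschema.users WHERE id > min_id;
   $$ LANGUAGE sql; */

//assuming the call resolves to an array of row objects
let users = await pgdb['myschema'].fn.list_users(10);
for (let user of users) {
    console.log(user.id, user.name);
}
```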
package/docs/generatingInterfaceForTables.md
ADDED
@@ -0,0 +1,35 @@
# Generating interface for tables
You can easily generate an interface for the tables to facilitate coding, just run
```bash
export PGUSER='test'
export PGPASSWORD='test'
export PGDATABASE='test'
#using PGUSER, PGPASSWORD and PGDATABASE env variables
node --harmony pgdb/lib/bin/generateInterface > testDbInterface.ts
```
It will generate something like:
```js
import {PgDb, PgSchema, PgTable} from "pogi";

export interface PgDbType extends PgDb {
    'pgdb_test': PgSchema_pgdb_test;
    'schemas': {
        'pgdb_test': PgSchema_pgdb_test;
    }
}

export interface PgSchema_pgdb_test extends PgSchema {
    'users': PgTable;
    tables: {
        'users': PgTable;
    }
}
```
So you can use it as
```js
let pgdb = <PgDbType>await PgDb.connect({connectionString: "postgres://"});
let users = await pgdb.pgdb_test.users.findAll();
```
If you want to help, you could add table definition generation as well, and merge schemas or tables that have the same type.
package/docs/index.md
ADDED
@@ -0,0 +1,48 @@


## pogi
**pogi** is an easy to use PostgreSQL handler for javascript, built on top of [pg.js](https://github.com/brianc/node-postgres)
(and it inherited a few things from [MassiveJS](https://github.com/robconery/massive-js)). It supports connection pooling, transactions,
typescript, async-await, an assignable logger, streams, executable sql files and more, all with a lot of sensible defaults.

It is not a full-featured ORM, it is rather aligned with KISS.
Therefore no initial definitions are needed; it rather issues some initial queries at startup
to look up the schemas, table names and some special column types.
It aims to make seamless integration with js objects and remove boilerplate code while keeping
the power of custom queries.

```js
import {PgDb} from "pogi";

(async()=> {
    let pgdb = await PgDb.connect({connectionString: "postgres://"});

    let table = pgdb['test']['users']; //or pgdb.test.users if you generate the interface

    let c1 = await pgdb.query(`SELECT COUNT(*) as c FROM ${table} WHERE active=:active`, {active: true});
    let c2 = await table.count({active: true});
    c1[0].c == c2 //true

    let user = {name: 'admin'}
    await table.insert(user);

    await table.update({id: 1}, user);

    let res = await table.find({id: [1,2,3]});
    ...

    pgdb.close(); //optional

})().catch(console.error)
```

Typescript should get the typing definition from the npm package, but if it doesn't, you can add it with typings:
```sh
typings install pogi=github:holdfenytolvaj/pogi/lib/index.d.ts --save
```

## Why do we need +1?
Since we wanted to keep things simple (and use Postgres' full power as much as possible), ORMs were out of consideration. pg.js on the other
hand was too basic and still required a lot of boilerplate code. MassiveJS looked promising, but
there again too many restrictions applied (no pool, no logger, no typescript, no transactions, no mixing of
relational and jsonb columns (not safely at least), etc.) and adding these was not possible without a redesign.
package/docs/logger.md
ADDED
@@ -0,0 +1,40 @@
## Logging

The logger needs to implement the following interface:

``` js
export interface PgDbLogger {
    log: Function;
    error: Function;
}
```

where `log` is most of the time called with 3 params:
1. sql query
2. parameter array if any
3. connection id (from the pool)

It is possible to specify separate loggers per schema (and per table or query). E.g.
```js
let myDbLogger = console;
let mySchemaLogger = {log:()=>{}, error:console.error};
let myTableLogger = {log:(sql, params, connectionId) => (connectionId) ? console.log('[',connectionId,']', sql,' < ',params) : console.log(sql),
                     error:console.error};
let myQueryLogger = console;

pgdb.setLogger(myDbLogger); //default console
pgdb.myschema.setLogger(mySchemaLogger);
pgdb.myschema.users.setLogger(myTableLogger);

pgdb.run('SELECT password FROM myschema.users'); //logged by myDbLogger
pgdb.myschema.run('SELECT password FROM myschema.users'); //logged by mySchemaLogger
pgdb.myschema.machines.findAll(); //logged by mySchemaLogger

pgdb.myschema.users.findAll(); //logged by myTableLogger

pgdb.myschema.users.findAll({logger:myQueryLogger}); //logged by myQueryLogger
```
The most specific logger found will be the one used.
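Since any object with `log` and `error` functions satisfies the interface, a custom logger can do whatever it wants with those three parameters. A minimal sketch (not from the original docs), assuming the call shape described above:

```js
//a PgDbLogger-compatible object that prefixes every entry with a timestamp
let timestampLogger = {
    log: (sql, params, connectionId) =>
        console.log(new Date().toISOString(), connectionId ? '[' + connectionId + ']' : '', sql, params || ''),
    error: (...args) => console.error(new Date().toISOString(), ...args)
};

pgdb.setLogger(timestampLogger);
```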
package/docs/mappingDatabaseTypes.md
ADDED
@@ -0,0 +1,89 @@
## Type conversion

By default every field is returned as a string from PostgreSQL. pg.js converts some of them, e.g. dates,
to js types; pgdb converts a bit more, e.g. date arrays, number arrays, bigints.

### The simplest way

```ts
var numWithValidation = val => {
    let v = +val;
    if (v > Number.MAX_SAFE_INTEGER || v < Number.MIN_SAFE_INTEGER) {
        throw Error("Number can't be represented in javascript precisely: " + val);
    }
    return v;
};

await pgdb.setTypeParser('int8', numWithValidation, 'myschema');
//leaving out 'myschema' would apply it to all schemas
```

### Looks simple, but...
pg.js doesn't handle exceptions during type conversion well.
If an exception is thrown, the node process will exit. So if an exception is possible, add your converter
to the pgdb layer instead. Note the 'PgDb' in the function name.

```ts
await pgdb.setPgDbTypeParser('int8', numWithValidation);
```

### Complex types and complex type arrays
Complex types look promising instead of link tables, but unfortunately
they have no foreign key checks at the moment in PostgreSQL (9.6).
Also, right now we didn't put much effort into this feature; we rather use jsonb columns instead.
But it would be relatively easy to add support for saving and reading them as js objects (and maybe to support the operators).

#### Example complex type:
```sql
CREATE TYPE "permissionType" AS ENUM ('read', 'write', 'admin');
CREATE TYPE "permissionForResourceType" AS (
    "permission" "permissionType",
    "resource" "text"
);
```

#### Add parsing to a list:
```js
function parseComplexType(str) {
    //cut off '(' and ')'
    str = str.substring(1, str.length-1);
    let e = /"((?:[^"]|"")*)"(?:,|$)|([^,]*)(?:,|$)/g;
    let valList = [];
    let parsingResult;
    let valStr;
    let hasNextValue;
    /**
     * the parsingResult.index<str.length check for the finish is not reliable,
     * because if the last value is null it goes undetected, e.g. (,,)
     */
    do {
        parsingResult = e.exec(str);
        valStr = (parsingResult[0]=='' || parsingResult[0]==',' || parsingResult[2]=='null') ? null : parsingResult[1] || parsingResult[2];
        if (parsingResult[0]=='"",' || parsingResult[0]=='""') {
            valStr = '';
        }
        valList.push(valStr ? valStr.replace(/""/g,'"') : valStr);
        hasNextValue = parsingResult[0].substring(parsingResult[0].length-1, parsingResult[0].length)==',';
    } while (hasNextValue);
    return valList;
}

await pgdb.setTypeParser('permissionForResourceType', parseComplexType, 'myschema');
```

#### Add parsing for an array:
```js
function parseComplexTypeArray(str) {
    let list = JSON.parse('[' + str.substring(1, str.length - 1) + ']');

    let result = [];
    for (let elementStr of list) {
        result.push(parseComplexType(elementStr));
    }
    return result;
}

await pgdb.setTypeParser('_permissionForResourceType', parseComplexTypeArray, 'myschema');
```
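As a quick illustration of the parser above (the sample value is made up; the escaping follows PostgreSQL's composite text format):

```js
//PostgreSQL renders ROW('read', 'my "resource"') as the text below;
//the parser splits the fields and unescapes the doubled quotes
parseComplexType('(read,"my ""resource""")');
//-> ['read', 'my "resource"']
```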
package/docs/notification.md
ADDED
@@ -0,0 +1,19 @@
## Notification
As simple as:
```js
let result = '';
await db.listen('channel', (data) => { result += data.payload; });
await db.listen('channel', () => { result += ',nextCallback'; });

await db.run(`NOTIFY channel, 'data'`);
//same as: await db.notify('channel', 'data');
//result will be: 'data,nextCallback'

await db.unlisten('channel');
//the dedicated listener connection is now released.
```

See the [Postgresql documentation](https://www.postgresql.org/docs/current/sql-notify.html)

## Some comments
Notification listeners use a dedicated connection. If e.g. the postgresql server restarts, some notifications might not be received, but the connection and the listeners will be re-created.
package/docs/pitfalls.md
ADDED
@@ -0,0 +1,73 @@

## Known pitfalls
>Nothing is without pitfalls, but for most libraries it's well hidden...

### Named parameter + `column in ()` expression

```
let dates = ['2000-01-01','2000-02-02'];
//doesn't work:
await pgdb.query(`SELECT * FROM ${dbTable} b WHERE DATE(b.from) IN :dates`, {dates});

//works:
await pgdb.query(`SELECT * FROM ${dbTable} b WHERE ARRAY[DATE(b.from)] && :dates`, {dates});
```

### postgres data types vs javascript types - general
pg.js is a powerful library but leaves many decisions to the user, e.g. converting types.
By default it doesn't convert arrays or integers, and dates are also tricky. We've added some basic
types, but rarely used types (e.g. date-ranges, custom complex types) still need to be converted.

### postgres data types vs javascript types - number
Javascript does not handle integers precisely outside the range [Number.MIN_SAFE_INTEGER, Number.MAX_SAFE_INTEGER],
and this is why they are not converted by default by pg.js; but that range is rarely exceeded,
thus it is safe to convert by default and just throw an exception when a value is above that number (9007199254740991).

### postgres data types vs javascript types - date
E.g. when you write your own queries, do not do this:
```js
let date = new Date().toISOString();
await pgdb.query(`SELECT * FROM users where created>'${date}'`);
```
this equals to (or something similar depending on your timezone):
```sql
select * from users where created>'1999-12-31T18:00:00.000Z';
```
It will have some side effects, as postgres will compare them as strings, so you need to enforce the conversion:
```js
await pgdb.query(`SELECT * FROM users where created>'${date}'::timestamptz`);
//or
await userTable.find({'created >':date});
await userTable.find({'created >': new Date()});//no need for toISOString()
```
this equals to:
```sql
select * from users where created>'2000-01-01 00:00:00';
```

### Table or column type change / truncate
If a column type changes or a truncate occurs (truncate needs a double check, it might be affected as well),
the oid numbers of the postgres data types might change; after
such queries pgdb needs to rerun the initial column type queries.
This can be done as easily as
```js
await pgdb.reload();
```

### Results' object constructor
Result objects have a special constructor that is not callable outside of pg.js.
It's rarely an issue, except e.g. if you use the xlsx npm package: after cloning
the object it calls the object's constructor, which can cause some issues.

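A minimal workaround sketch (not from the original docs): copy the rows into plain objects before handing them to such libraries.

```js
let rows = await pgdb.query(`SELECT id, name FROM users`);
//plain copies use the default Object constructor, so re-invoking it is harmless
let plainRows = rows.map(row => Object.assign({}, row));
```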
### Connectivity(?)
See [pg-ka-fix](https://github.com/numminorihsf/pg-ka-fix). We haven't met this issue,
but it needs investigation; it is left here as a possible issue.

### Field name collision
Result fields do not keep their table alias references, so the following query will result
in a name collision:
```js
await pgdb.query(`select u1.id, u2.id from ${table} u1 left join ${table} u2 ON true `);
```
Not a big issue, it just needs aliases, but keep it in mind. In case this happens an exception will be thrown.
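For example, giving each colliding column its own alias (a straightforward fix, shown here only as an illustration) avoids the collision:

```js
//each id gets its own alias, so the result keys no longer clash
await pgdb.query(`select u1.id as u1_id, u2.id as u2_id from ${table} u1 left join ${table} u2 ON true`);
```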
package/docs/streams.md
ADDED
@@ -0,0 +1,68 @@
# Example

## Use a stream instead of e.g.:
```ts
let total = await table.count();
for (let offset = 0; offset < total; offset += 100) {
    let list = await table.findAll({offset, limit:100});
    for (let row of list) {
        ...
        //unless you have a stable ordering and use it in the query above,
        //you might get some rows multiple times here...
        //use streams instead
    }
}
```

## Using queryAsStream
```ts
let stream = await pgdb.queryAsStream(`SELECT * FROM generate_series(0, 1001) num`);

stream.on('data', (c: any)=> {
    //do or do not do stuff
});

await new Promise((resolve, reject)=> {
    stream.on('end', resolve);
    stream.on('error', reject);
});
```

## Using find

find/findAll/findWhere can all be used to return a stream by passing it in the options.

```ts
let stream;
stream = await pgdb.users.find({'id >':1}, {stream:true});
stream = await pgdb.users.findAll({stream:true});
stream = await pgdb.users.findWhere('true', null, {stream:true});
```

## Using async in a stream
```ts
let stream = await pgdb.users.find({'id >':1}, {stream:true});
stream.on('data', (user: User)=> {
    stream.pause();

    (async function() {
        ...
        await mightDoSth(user);
        ...
        stream.resume();
    })().catch(e=> {
        console.error(e);
        stream.emit("error", e);
    });
});

await new Promise((resolve, reject)=> {
    stream.on('end', resolve);
    stream.on('error', reject);
});
```