@graffy/pg 0.15.17 → 0.15.19-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/Readme.md CHANGED
@@ -2,19 +2,41 @@
2
2
 
3
3
  The standard Postgres module for Graffy. Each instance of this module mounts a Postgres table as a Graffy subtree.
4
4
 
5
+ Requires the [pg](https://github.com/brianc/node-postgres) library to be installed as a peer dependency.
6
+
5
7
  ## Usage
6
8
 
7
9
  ```js
8
10
  import pg from '@graffy/pg';
11
+ import Graffy from '@graffy/core';
12
+ import link from '@graffy/link';
9
13
 
10
- graffyStore.use(pg(options));
14
+ const store = new Graffy();
15
+ store.use(path, pg(options));
11
16
  ```
12
17
 
13
- Uses the [pg](https://github.com/brianc/node-postgres) library.
18
+ ### Options
19
+
20
+ - `table`: the name of the table. If not provided, the last segment of the `path` is used. This table must exist.
21
+ - `idCol`: the name of the column to use as ID. Defaults to `id`. This column must exist and be the primary key or have a unique constraint.
22
+ - `verCol`: the name of the column to store the Graffy version number. This column must exist, and must have a `DEFAULT` SQL expression defined - this expression is evaluated to calculate the version number. Graffy versions must monotonically increase, so this expression is typically based on `CURRENT_TIMESTAMP`.
23
+ - `connection`: a [pg](https://github.com/brianc/node-postgres) Client or Pool object (recommended), or the arguments for constructing a new Pool object. Optional.
24
+
25
+ ### Database connection
26
+
27
+ Graffy Postgres can be configured to use a specific pg.Client object on a per-request basis, by including a `pgClient` property on the read or write options. This is useful for implementing transactions, partitioning, row-level security, etc.
28
+
29
+ If no `pgClient` is provided for a particular operation, Graffy Postgres falls back to the "global" pg.Client or pg.Pool object defined in the `connection` parameter in the initialization options. If no `connection` parameter was passed, a new pg.Pool will be created using PSQL environment variables.
30
+
31
+ ## Data model
32
+
33
+ Graffy Postgres interprets each property as the name of a column, except for `$count`, `$sum` etc. as described in the aggregation section below.
34
+
35
+ It also interprets the `$key` as specifying filtering, sorting, pagination and aggregation parameters.
14
36
 
15
- ### Query filters
37
+ ### Filters
16
38
 
17
- Query filters are passed in `$key` and are JSON-based, somewhat like MongoDB.
39
+ Query filters are JSON-based, somewhat like MongoDB.
18
40
 
19
41
  1. Filter expressions follow a **property**, **operator**, **value** order. Values are scalar values (strings or numbers).
20
42
  2. Property names are always object keys. They may be strings with dots `.`.
package/index.cjs CHANGED
@@ -158,7 +158,6 @@ function simplify(node) {
158
158
  }
159
159
  return node;
160
160
  }
161
- const nowTimestamp = sql__default["default"]`cast(extract(epoch from now()) as integer)`;
162
161
  const getJsonBuildTrusted = (variadic) => {
163
162
  const args = sql.join(Object.entries(variadic).map(([name, value]) => {
164
163
  return sql__default["default"]`'${sql.raw(name)}', ${getJsonBuildValue(value)}`;
@@ -226,14 +225,14 @@ function castValue(value, type, name, isPut) {
226
225
  const getInsert = (row, options) => {
227
226
  const cols = [];
228
227
  const vals = [];
229
- Object.entries(row).filter(([col]) => col !== options.verCol && col[0] !== "$").concat([[options.verCol, nowTimestamp]]).forEach(([col, val]) => {
228
+ Object.entries(row).filter(([col]) => col !== options.verCol && col[0] !== "$").concat([[options.verCol, sql__default["default"]`default`]]).forEach(([col, val]) => {
230
229
  cols.push(sql__default["default"]`"${sql.raw(col)}"`);
231
230
  vals.push(castValue(val, options.schema.types[col], col, row.$put));
232
231
  });
233
232
  return { cols: sql.join(cols, ", "), vals: sql.join(vals, ", ") };
234
233
  };
235
234
  const getUpdates = (row, options) => {
236
- return sql.join(Object.entries(row).filter(([col]) => col !== options.idCol && col[0] !== "$").map(([col, val]) => sql__default["default"]`"${sql.raw(col)}" = ${castValue(val, options.schema.types[col], col, row.$put)}`).concat(sql__default["default"]`"${sql.raw(options.verCol)}" = ${nowTimestamp}`), ", ");
235
+ return sql.join(Object.entries(row).filter(([col]) => col !== options.idCol && col[0] !== "$").map(([col, val]) => sql__default["default"]`"${sql.raw(col)}" = ${castValue(val, options.schema.types[col], col, row.$put)}`).concat(sql__default["default"]`"${sql.raw(options.verCol)}" = default`), ", ");
237
236
  };
238
237
  function getJsonUpdate(object, col, path) {
239
238
  if (!object || typeof object !== "object" || Array.isArray(object) || object.$put) {
@@ -325,18 +324,18 @@ function getSql(filter, options) {
325
324
  }
326
325
  return getNodeSql(getAst(filter));
327
326
  }
328
- const getIdMeta = ({ idCol }) => getJsonBuildTrusted({
327
+ const getIdMeta = ({ idCol, verDefault }) => getJsonBuildTrusted({
329
328
  $key: sql__default["default"]`"${sql.raw(idCol)}"`,
330
- $ver: nowTimestamp
329
+ $ver: sql.raw(verDefault)
331
330
  });
332
- const getArgMeta = (key, prefix, idCol) => getJsonBuildTrusted({
331
+ const getArgMeta = (key, { prefix, idCol, verDefault }) => getJsonBuildTrusted({
333
332
  $key: key,
334
333
  $ref: sql__default["default"]`jsonb_build_array(${sql.join(prefix.map((k) => sql__default["default"]`${k}::text`))}, "${sql.raw(idCol)}")`,
335
- $ver: nowTimestamp
334
+ $ver: sql.raw(verDefault)
336
335
  });
337
- const getAggMeta = (key, $group) => getJsonBuildTrusted({
336
+ const getAggMeta = (key, $group, { verDefault }) => getJsonBuildTrusted({
338
337
  $key: sql.join([key, getJsonBuildTrusted({ $group })].filter(Boolean), " || "),
339
- $ver: nowTimestamp
338
+ $ver: sql.raw(verDefault)
340
339
  });
341
340
  function getArgSql(_a, options) {
342
341
  var _b = _a, { $first, $last, $after, $before, $since, $until, $all, $cursor: _ } = _b, rest = __objRest(_b, ["$first", "$last", "$after", "$before", "$since", "$until", "$all", "$cursor"]);
@@ -345,7 +344,7 @@ function getArgSql(_a, options) {
345
344
  if ($order && $group) {
346
345
  throw Error("pg_arg.order_and_group_unsupported in " + prefix);
347
346
  }
348
- const meta = (key2) => $group ? getAggMeta(key2, $group) : getArgMeta(key2, prefix, idCol);
347
+ const meta = (key2) => $group ? getAggMeta(key2, $group, options) : getArgMeta(key2, options);
349
348
  const groupCols = Array.isArray($group) && $group.length && $group.map(lookup);
350
349
  const group = groupCols ? sql.join(groupCols, ", ") : void 0;
351
350
  const hasRangeArg = $before || $after || $since || $until || $first || $last || $all || $order;
@@ -528,24 +527,34 @@ class Db {
528
527
  async ensureSchema(tableOptions) {
529
528
  if (tableOptions.schema)
530
529
  return;
531
- const { table } = tableOptions;
532
- const types = (await this.query(sql__default["default"]`SELECT jsonb_object_agg(
533
- column_name,
534
- udt_name
535
- )
536
- FROM information_schema.columns
537
- WHERE
530
+ const { table, verCol } = tableOptions;
531
+ const tableSchema = (await this.query(sql__default["default"]`
532
+ SELECT table_schema
533
+ FROM information_schema.tables
534
+ WHERE table_name = ${table}
535
+ ORDER BY array_position(current_schemas(false)::text[], table_schema::text) ASC
536
+ LIMIT 1`)).rows[0][0];
537
+ const types = (await this.query(sql__default["default"]`
538
+ SELECT jsonb_object_agg(column_name, udt_name)
539
+ FROM information_schema.columns
540
+ WHERE
538
541
  table_name = ${table} AND
539
- table_schema = (
540
- SELECT table_schema
541
- FROM information_schema.tables
542
- WHERE table_name = ${table}
543
- ORDER BY array_position(current_schemas(false)::text[], table_schema::text) ASC
544
- LIMIT 1)`)).rows[0][0];
542
+ table_schema = ${tableSchema}`)).rows[0][0];
545
543
  if (!types)
546
544
  throw Error(`pg.missing_table ${table}`);
545
+ const verDefault = (await this.query(sql__default["default"]`
546
+ SELECT column_default
547
+ FROM information_schema.columns
548
+ WHERE
549
+ table_name = ${table} AND
550
+ table_schema = ${tableSchema} AND
551
+ column_name = ${verCol}`)).rows[0][0];
552
+ if (!verDefault) {
553
+ throw Error(`pg.verCol_without_default ${verCol}`);
554
+ }
547
555
  log("ensureSchema", types);
548
556
  tableOptions.schema = { types };
557
+ tableOptions.verDefault = verDefault;
549
558
  }
550
559
  async read(rootQuery, tableOptions) {
551
560
  const idQueries = {};
@@ -621,7 +630,7 @@ class Db {
621
630
  return result;
622
631
  }
623
632
  }
624
- const pg = ({ table, idCol, verCol, connection, schema }) => (store) => {
633
+ const pg = ({ table, idCol, verCol, connection, schema, verDefault }) => (store) => {
625
634
  store.on("read", read);
626
635
  store.on("write", write);
627
636
  const prefix = store.path;
@@ -630,7 +639,8 @@ const pg = ({ table, idCol, verCol, connection, schema }) => (store) => {
630
639
  table: table || prefix[prefix.length - 1] || "default",
631
640
  idCol: idCol || "id",
632
641
  verCol: verCol || "updatedAt",
633
- schema
642
+ schema,
643
+ verDefault
634
644
  };
635
645
  const defaultDb = new Db(connection);
636
646
  function read(query, options, next) {
package/index.mjs CHANGED
@@ -150,7 +150,6 @@ function simplify(node) {
150
150
  }
151
151
  return node;
152
152
  }
153
- const nowTimestamp = sql`cast(extract(epoch from now()) as integer)`;
154
153
  const getJsonBuildTrusted = (variadic) => {
155
154
  const args = join(Object.entries(variadic).map(([name, value]) => {
156
155
  return sql`'${raw(name)}', ${getJsonBuildValue(value)}`;
@@ -218,14 +217,14 @@ function castValue(value, type, name, isPut) {
218
217
  const getInsert = (row, options) => {
219
218
  const cols = [];
220
219
  const vals = [];
221
- Object.entries(row).filter(([col]) => col !== options.verCol && col[0] !== "$").concat([[options.verCol, nowTimestamp]]).forEach(([col, val]) => {
220
+ Object.entries(row).filter(([col]) => col !== options.verCol && col[0] !== "$").concat([[options.verCol, sql`default`]]).forEach(([col, val]) => {
222
221
  cols.push(sql`"${raw(col)}"`);
223
222
  vals.push(castValue(val, options.schema.types[col], col, row.$put));
224
223
  });
225
224
  return { cols: join(cols, ", "), vals: join(vals, ", ") };
226
225
  };
227
226
  const getUpdates = (row, options) => {
228
- return join(Object.entries(row).filter(([col]) => col !== options.idCol && col[0] !== "$").map(([col, val]) => sql`"${raw(col)}" = ${castValue(val, options.schema.types[col], col, row.$put)}`).concat(sql`"${raw(options.verCol)}" = ${nowTimestamp}`), ", ");
227
+ return join(Object.entries(row).filter(([col]) => col !== options.idCol && col[0] !== "$").map(([col, val]) => sql`"${raw(col)}" = ${castValue(val, options.schema.types[col], col, row.$put)}`).concat(sql`"${raw(options.verCol)}" = default`), ", ");
229
228
  };
230
229
  function getJsonUpdate(object, col, path) {
231
230
  if (!object || typeof object !== "object" || Array.isArray(object) || object.$put) {
@@ -317,18 +316,18 @@ function getSql(filter, options) {
317
316
  }
318
317
  return getNodeSql(getAst(filter));
319
318
  }
320
- const getIdMeta = ({ idCol }) => getJsonBuildTrusted({
319
+ const getIdMeta = ({ idCol, verDefault }) => getJsonBuildTrusted({
321
320
  $key: sql`"${raw(idCol)}"`,
322
- $ver: nowTimestamp
321
+ $ver: raw(verDefault)
323
322
  });
324
- const getArgMeta = (key, prefix, idCol) => getJsonBuildTrusted({
323
+ const getArgMeta = (key, { prefix, idCol, verDefault }) => getJsonBuildTrusted({
325
324
  $key: key,
326
325
  $ref: sql`jsonb_build_array(${join(prefix.map((k) => sql`${k}::text`))}, "${raw(idCol)}")`,
327
- $ver: nowTimestamp
326
+ $ver: raw(verDefault)
328
327
  });
329
- const getAggMeta = (key, $group) => getJsonBuildTrusted({
328
+ const getAggMeta = (key, $group, { verDefault }) => getJsonBuildTrusted({
330
329
  $key: join([key, getJsonBuildTrusted({ $group })].filter(Boolean), " || "),
331
- $ver: nowTimestamp
330
+ $ver: raw(verDefault)
332
331
  });
333
332
  function getArgSql(_a, options) {
334
333
  var _b = _a, { $first, $last, $after, $before, $since, $until, $all, $cursor: _ } = _b, rest = __objRest(_b, ["$first", "$last", "$after", "$before", "$since", "$until", "$all", "$cursor"]);
@@ -337,7 +336,7 @@ function getArgSql(_a, options) {
337
336
  if ($order && $group) {
338
337
  throw Error("pg_arg.order_and_group_unsupported in " + prefix);
339
338
  }
340
- const meta = (key2) => $group ? getAggMeta(key2, $group) : getArgMeta(key2, prefix, idCol);
339
+ const meta = (key2) => $group ? getAggMeta(key2, $group, options) : getArgMeta(key2, options);
341
340
  const groupCols = Array.isArray($group) && $group.length && $group.map(lookup);
342
341
  const group = groupCols ? join(groupCols, ", ") : void 0;
343
342
  const hasRangeArg = $before || $after || $since || $until || $first || $last || $all || $order;
@@ -520,24 +519,34 @@ class Db {
520
519
  async ensureSchema(tableOptions) {
521
520
  if (tableOptions.schema)
522
521
  return;
523
- const { table } = tableOptions;
524
- const types = (await this.query(sql`SELECT jsonb_object_agg(
525
- column_name,
526
- udt_name
527
- )
528
- FROM information_schema.columns
529
- WHERE
522
+ const { table, verCol } = tableOptions;
523
+ const tableSchema = (await this.query(sql`
524
+ SELECT table_schema
525
+ FROM information_schema.tables
526
+ WHERE table_name = ${table}
527
+ ORDER BY array_position(current_schemas(false)::text[], table_schema::text) ASC
528
+ LIMIT 1`)).rows[0][0];
529
+ const types = (await this.query(sql`
530
+ SELECT jsonb_object_agg(column_name, udt_name)
531
+ FROM information_schema.columns
532
+ WHERE
530
533
  table_name = ${table} AND
531
- table_schema = (
532
- SELECT table_schema
533
- FROM information_schema.tables
534
- WHERE table_name = ${table}
535
- ORDER BY array_position(current_schemas(false)::text[], table_schema::text) ASC
536
- LIMIT 1)`)).rows[0][0];
534
+ table_schema = ${tableSchema}`)).rows[0][0];
537
535
  if (!types)
538
536
  throw Error(`pg.missing_table ${table}`);
537
+ const verDefault = (await this.query(sql`
538
+ SELECT column_default
539
+ FROM information_schema.columns
540
+ WHERE
541
+ table_name = ${table} AND
542
+ table_schema = ${tableSchema} AND
543
+ column_name = ${verCol}`)).rows[0][0];
544
+ if (!verDefault) {
545
+ throw Error(`pg.verCol_without_default ${verCol}`);
546
+ }
539
547
  log("ensureSchema", types);
540
548
  tableOptions.schema = { types };
549
+ tableOptions.verDefault = verDefault;
541
550
  }
542
551
  async read(rootQuery, tableOptions) {
543
552
  const idQueries = {};
@@ -613,7 +622,7 @@ class Db {
613
622
  return result;
614
623
  }
615
624
  }
616
- const pg = ({ table, idCol, verCol, connection, schema }) => (store) => {
625
+ const pg = ({ table, idCol, verCol, connection, schema, verDefault }) => (store) => {
617
626
  store.on("read", read);
618
627
  store.on("write", write);
619
628
  const prefix = store.path;
@@ -622,7 +631,8 @@ const pg = ({ table, idCol, verCol, connection, schema }) => (store) => {
622
631
  table: table || prefix[prefix.length - 1] || "default",
623
632
  idCol: idCol || "id",
624
633
  verCol: verCol || "updatedAt",
625
- schema
634
+ schema,
635
+ verDefault
626
636
  };
627
637
  const defaultDb = new Db(connection);
628
638
  function read(query, options, next) {
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "@graffy/pg",
3
3
  "description": "The standard Postgres module for Graffy. Each instance this module mounts a Postgres table as a Graffy subtree.",
4
4
  "author": "aravind (https://github.com/aravindet)",
5
- "version": "0.15.17",
5
+ "version": "0.15.19-alpha.2",
6
6
  "main": "./index.cjs",
7
7
  "exports": {
8
8
  "import": "./index.mjs",
@@ -16,7 +16,7 @@
16
16
  },
17
17
  "license": "Apache-2.0",
18
18
  "dependencies": {
19
- "@graffy/common": "0.15.17",
19
+ "@graffy/common": "0.15.19-alpha.2",
20
20
  "sql-template-tag": "^4.1.0",
21
21
  "debug": "^4.3.3"
22
22
  },
package/types/index.d.ts CHANGED
@@ -1,7 +1,8 @@
1
- export function pg({ table, idCol, verCol, connection, schema }: {
1
+ export function pg({ table, idCol, verCol, connection, schema, verDefault }: {
2
2
  table?: string;
3
3
  idCol?: string;
4
4
  verCol?: string;
5
5
  connection?: any;
6
6
  schema?: any;
7
+ verDefault?: string;
7
8
  }): (store: any) => void;
@@ -1,5 +1,4 @@
1
1
  export function vertexSql(array: any): Sql;
2
- export const nowTimestamp: Sql;
3
2
  export function getJsonBuildTrusted(variadic: any): Sql;
4
3
  export function lookup(prop: any): Sql;
5
4
  export function getSelectCols(table: any, projection?: any): Sql;
@@ -1,5 +1,12 @@
1
- export function getIdMeta({ idCol }: {
1
+ export function getIdMeta({ idCol, verDefault }: {
2
2
  idCol: any;
3
+ verDefault: any;
4
+ }): import("sql-template-tag").Sql;
5
+ export function getArgMeta(key: any, { prefix, idCol, verDefault }: {
6
+ prefix: any;
7
+ idCol: any;
8
+ verDefault: any;
9
+ }): import("sql-template-tag").Sql;
10
+ export function getAggMeta(key: any, $group: any, { verDefault }: {
11
+ verDefault: any;
3
12
  }): import("sql-template-tag").Sql;
4
- export function getArgMeta(key: any, prefix: any, idCol: any): import("sql-template-tag").Sql;
5
- export function getAggMeta(key: any, $group: any): import("sql-template-tag").Sql;