prostgles-server 4.2.160 → 4.2.161
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/Auth/AuthHandler.ts +436 -0
- package/lib/Auth/AuthTypes.ts +280 -0
- package/lib/Auth/getSafeReturnURL.ts +35 -0
- package/lib/Auth/sendEmail.ts +83 -0
- package/lib/Auth/setAuthProviders.ts +128 -0
- package/lib/Auth/setEmailProvider.ts +85 -0
- package/lib/Auth/setupAuthRoutes.ts +161 -0
- package/lib/DBEventsManager.ts +178 -0
- package/lib/DBSchemaBuilder.ts +225 -0
- package/lib/DboBuilder/DboBuilder.ts +319 -0
- package/lib/DboBuilder/DboBuilderTypes.ts +361 -0
- package/lib/DboBuilder/QueryBuilder/Functions.ts +1153 -0
- package/lib/DboBuilder/QueryBuilder/QueryBuilder.ts +288 -0
- package/lib/DboBuilder/QueryBuilder/getJoinQuery.ts +263 -0
- package/lib/DboBuilder/QueryBuilder/getNewQuery.ts +271 -0
- package/lib/DboBuilder/QueryBuilder/getSelectQuery.ts +136 -0
- package/lib/DboBuilder/QueryBuilder/prepareHaving.ts +22 -0
- package/lib/DboBuilder/QueryStreamer.ts +250 -0
- package/lib/DboBuilder/TableHandler/DataValidator.ts +428 -0
- package/lib/DboBuilder/TableHandler/TableHandler.ts +205 -0
- package/lib/DboBuilder/TableHandler/delete.ts +115 -0
- package/lib/DboBuilder/TableHandler/insert.ts +183 -0
- package/lib/DboBuilder/TableHandler/insertTest.ts +78 -0
- package/lib/DboBuilder/TableHandler/onDeleteFromFileTable.ts +62 -0
- package/lib/DboBuilder/TableHandler/runInsertUpdateQuery.ts +134 -0
- package/lib/DboBuilder/TableHandler/update.ts +126 -0
- package/lib/DboBuilder/TableHandler/updateBatch.ts +49 -0
- package/lib/DboBuilder/TableHandler/updateFile.ts +48 -0
- package/lib/DboBuilder/TableHandler/upsert.ts +34 -0
- package/lib/DboBuilder/ViewHandler/ViewHandler.ts +393 -0
- package/lib/DboBuilder/ViewHandler/count.ts +38 -0
- package/lib/DboBuilder/ViewHandler/find.ts +153 -0
- package/lib/DboBuilder/ViewHandler/getExistsCondition.ts +73 -0
- package/lib/DboBuilder/ViewHandler/getExistsFilters.ts +74 -0
- package/lib/DboBuilder/ViewHandler/getInfo.ts +32 -0
- package/lib/DboBuilder/ViewHandler/getTableJoinQuery.ts +84 -0
- package/lib/DboBuilder/ViewHandler/parseComplexFilter.ts +96 -0
- package/lib/DboBuilder/ViewHandler/parseFieldFilter.ts +105 -0
- package/lib/DboBuilder/ViewHandler/parseJoinPath.ts +208 -0
- package/lib/DboBuilder/ViewHandler/prepareSortItems.ts +163 -0
- package/lib/DboBuilder/ViewHandler/prepareWhere.ts +90 -0
- package/lib/DboBuilder/ViewHandler/size.ts +37 -0
- package/lib/DboBuilder/ViewHandler/subscribe.ts +118 -0
- package/lib/DboBuilder/ViewHandler/validateViewRules.ts +70 -0
- package/lib/DboBuilder/dboBuilderUtils.ts +222 -0
- package/lib/DboBuilder/getColumns.ts +114 -0
- package/lib/DboBuilder/getCondition.ts +201 -0
- package/lib/DboBuilder/getSubscribeRelatedTables.ts +190 -0
- package/lib/DboBuilder/getTablesForSchemaPostgresSQL.ts +426 -0
- package/lib/DboBuilder/insertNestedRecords.ts +355 -0
- package/lib/DboBuilder/parseUpdateRules.ts +187 -0
- package/lib/DboBuilder/prepareShortestJoinPaths.ts +186 -0
- package/lib/DboBuilder/runSQL.ts +182 -0
- package/lib/DboBuilder/runTransaction.ts +50 -0
- package/lib/DboBuilder/sqlErrCodeToMsg.ts +254 -0
- package/lib/DboBuilder/uploadFile.ts +69 -0
- package/lib/Event_Trigger_Tags.ts +118 -0
- package/lib/FileManager/FileManager.ts +358 -0
- package/lib/FileManager/getValidatedFileType.ts +69 -0
- package/lib/FileManager/initFileManager.ts +187 -0
- package/lib/FileManager/upload.ts +62 -0
- package/lib/FileManager/uploadStream.ts +79 -0
- package/lib/Filtering.ts +463 -0
- package/lib/JSONBValidation/validate_jsonb_schema_sql.ts +502 -0
- package/lib/JSONBValidation/validation.ts +143 -0
- package/lib/Logging.ts +127 -0
- package/lib/PostgresNotifListenManager.ts +143 -0
- package/lib/Prostgles.ts +485 -0
- package/lib/ProstglesTypes.ts +196 -0
- package/lib/PubSubManager/PubSubManager.ts +609 -0
- package/lib/PubSubManager/addSub.ts +138 -0
- package/lib/PubSubManager/addSync.ts +141 -0
- package/lib/PubSubManager/getCreatePubSubManagerError.ts +72 -0
- package/lib/PubSubManager/getPubSubManagerInitQuery.ts +662 -0
- package/lib/PubSubManager/initPubSubManager.ts +79 -0
- package/lib/PubSubManager/notifListener.ts +173 -0
- package/lib/PubSubManager/orphanTriggerCheck.ts +70 -0
- package/lib/PubSubManager/pushSubData.ts +55 -0
- package/lib/PublishParser/PublishParser.ts +162 -0
- package/lib/PublishParser/getFileTableRules.ts +124 -0
- package/lib/PublishParser/getSchemaFromPublish.ts +141 -0
- package/lib/PublishParser/getTableRulesWithoutFileTable.ts +177 -0
- package/lib/PublishParser/publishTypesAndUtils.ts +399 -0
- package/lib/RestApi.ts +127 -0
- package/lib/SchemaWatch/SchemaWatch.ts +90 -0
- package/lib/SchemaWatch/createSchemaWatchEventTrigger.ts +3 -0
- package/lib/SchemaWatch/getValidatedWatchSchemaType.ts +45 -0
- package/lib/SchemaWatch/getWatchSchemaTagList.ts +27 -0
- package/lib/SyncReplication.ts +557 -0
- package/lib/TableConfig/TableConfig.ts +468 -0
- package/lib/TableConfig/getColumnDefinitionQuery.ts +111 -0
- package/lib/TableConfig/getConstraintDefinitionQueries.ts +95 -0
- package/lib/TableConfig/getFutureTableSchema.ts +64 -0
- package/lib/TableConfig/getPGIndexes.ts +53 -0
- package/lib/TableConfig/getTableColumnQueries.ts +129 -0
- package/lib/TableConfig/initTableConfig.ts +326 -0
- package/lib/index.ts +13 -0
- package/lib/initProstgles.ts +319 -0
- package/lib/onSocketConnected.ts +102 -0
- package/lib/runClientRequest.ts +129 -0
- package/lib/shortestPath.ts +122 -0
- package/lib/typeTests/DBoGenerated.d.ts +320 -0
- package/lib/typeTests/dboTypeCheck.ts +81 -0
- package/lib/utils.ts +15 -0
- package/package.json +1 -1
|
@@ -0,0 +1,1153 @@
|
|
|
1
|
+
import { asName, ColumnInfo, isEmpty, isObject, PG_COLUMN_UDT_DATA_TYPE, TextFilter_FullTextSearchFilterKeys } from "prostgles-types";
|
|
2
|
+
import { isPlainObject, pgp, postgresToTsType } from "../DboBuilder";
|
|
3
|
+
import { parseFieldFilter } from "../ViewHandler/parseFieldFilter";
|
|
4
|
+
import { asNameAlias } from "./QueryBuilder";
|
|
5
|
+
|
|
6
|
+
/**
 * Resolves a client-requested function (by name, or by an already-resolved computed-column
 * spec) to its FunctionSpec, validating the request against the allowed specs and columns.
 *
 * @param funcData.func - function name (e.g. "$md5_multi") or a computed-column FunctionSpec
 * @param funcData.args - raw client-supplied arguments for the function
 * @param funcData.functions - the FunctionSpec list the caller is allowed to use
 * @param funcData.allowedFields - column names the caller is permitted to reference
 * @returns the matching FunctionSpec
 * Throws (plain string messages, per this file's convention) on unknown functions,
 * disallowed field names, or too few columns.
 */
export const parseFunction = (funcData: { func: string | FunctionSpec, args: any[], functions: FunctionSpec[]; allowedFields: string[]; }): FunctionSpec => {
  const { func, args, functions, allowedFields } = funcData;

  /* Function is computed column. No checks needed */
  if(typeof func !== "string"){
    const computedCol = COMPUTED_FIELDS.find(c => c.name === func.name);
    if(!computedCol) throw `Unexpected function: computed column spec not found for ${JSON.stringify(func.name)}`;
    return func;
  }

  const funcName = func;
  // Prefix validation errors with the exact function call that was attempted.
  const makeErr = (msg: string): string => {
    return `Issue with function ${JSON.stringify({ [funcName]: args })}: \n${msg}`
  }

  /* Find function */
  const funcDef = functions.find(f => f.name === funcName);

  if(!funcDef) {
    /* Build a "did you mean" hint from similarly-named specs.
       NOTE(review): slice(1) strips the "$" prefix from candidate names but funcName keeps
       its own "$" — so the hint only matches when the client omitted the "$". Confirm intended. */
    const sf = functions.filter(f => f.name.toLowerCase().slice(1).startsWith(funcName.toLowerCase())).sort((a, b) => (a.name.length - b.name.length));
    const hint = (sf.length? `. \n Maybe you meant: \n | ${sf.map(s => s.name + " " + (s.description || "")).join(" \n | ")} ?` : "");
    throw "\n Function " + funcName + " does not exist or is not allowed " + hint;
  }

  /* Validate fields */
  const fields = funcDef.getFields(args);
  if(fields !== "*"){
    // Every field name produced from the args must be a string within allowedFields.
    fields.forEach(fieldKey => {
      if(typeof fieldKey !== "string" || !allowedFields.includes(fieldKey)) {
        throw makeErr(`getFields() => field name ${JSON.stringify(fieldKey)} is invalid or disallowed`)
      }
    });
    if((funcDef.minCols ?? 0) > fields.length){
      throw makeErr(`Less columns provided than necessary (minCols=${funcDef.minCols})`)
    }
  }

  // Functions that take args (and don't explicitly allow zero columns via minCols=0)
  // must have produced at least one field name.
  if(funcDef.numArgs && funcDef.minCols !== 0 && fields !== "*" && Array.isArray(fields) && !fields.length) {
    throw `\n Function "${funcDef.name}" expects at least a field name but has not been provided with one`;
  }

  return funcDef;
}
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
/** Parameters handed to FunctionSpec.getQuery()/FieldSpec.getQuery() when building SQL. */
type GetQueryArgs = {
  /** Full column metadata of the table the function runs against */
  allColumns: ColumnInfo[];
  /** Column names the caller is permitted to reference */
  allowedFields: string[];
  /** Raw client-supplied arguments (by convention the first is usually a column name) */
  args: any[];
  /** Alias the table is referenced by in the outer query, if any */
  tableAlias?: string;
  /** presumably names the ctid system column when row identity is needed — TODO confirm against callers */
  ctidField?: string;
};
|
|
58
|
+
|
|
59
|
+
/** Spec of a selectable field: either a plain table column or a computed value. */
export type FieldSpec = {
  name: string;
  type: "column" | "computed";
  /**
   * Builds the SQL snippet selecting this field (no "args" — fields take none).
   * allowedFields passed for multicol functions (e.g.: $rowhash)
   */
  getQuery: (params: Omit<GetQueryArgs, "args">) => string;
};
|
|
67
|
+
|
|
68
|
+
/**
 * Spec describing a server-side SQL function/aggregation that clients may invoke
 * (e.g. "$md5_multi", "$ST_DWithin"). Resolved and validated by parseFunction.
 */
export type FunctionSpec = {
  /** Client-facing name, conventionally prefixed with "$" */
  name: string;

  /** Human-readable usage text, included in "did you mean" hints */
  description?: string;

  /**
   * If true then it can be used in filters and is expected to return boolean
   */
  canBeUsedForFilter?: boolean;

  /**
   * If true then the first argument is expected to be a column name
   */
  singleColArg: boolean;

  /**
   * If true then this func can be used within where clause
   */
  // returnsBoolean?: boolean;

  /**
   * Number of arguments expected
   */
  numArgs: number;

  /**
   * If provided then the number of column names provided to the function (from getFields()) must not be less than this
   * By default every function is checked against numArgs
   */
  minCols?: number;

  type: "function" | "aggregation" | "computed";
  /**
   * getFields: string[] -> used to validate user supplied field names. It will be fired before querying to validate against allowed columns
   * if no field names are used from arguments then return an empty array
   */
  getFields: (args: any[]) => "*" | string[];
  /**
   * Builds the SQL snippet for this function call.
   * allowedFields passed for multicol functions (e.g.: $rowhash)
   */
  getQuery: (params: GetQueryArgs) => string;

  /** Postgres UDT name of the result, when statically known */
  returnType?: PG_COLUMN_UDT_DATA_TYPE;
};
|
|
112
|
+
|
|
113
|
+
const MAX_COL_NUM = 1600;
|
|
114
|
+
const asValue = (v: any, castAs = "") => pgp.as.format("$1" + castAs, [v]);
|
|
115
|
+
|
|
116
|
+
const parseUnix = (colName: string, tableAlias: string | undefined, allColumns: ColumnInfo[], opts: { timeZone: boolean | string } | undefined) => {
|
|
117
|
+
let tz = "";
|
|
118
|
+
if(opts){
|
|
119
|
+
const { timeZone } = opts ?? {};
|
|
120
|
+
if(timeZone && typeof timeZone !== "string" && typeof timeZone !== "boolean"){
|
|
121
|
+
throw `Bad timeZone value. timeZone can be boolean or string`;
|
|
122
|
+
}
|
|
123
|
+
if(timeZone === true){
|
|
124
|
+
tz = "::TIMESTAMPTZ";
|
|
125
|
+
} else if(typeof timeZone === "string"){
|
|
126
|
+
tz = ` AT TIME ZONE ${asValue(timeZone)}`;
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
const col = allColumns.find(c => c.name === colName);
|
|
130
|
+
if(!col) throw `Unexpected: column ${colName} not found`;
|
|
131
|
+
const escapedName = asNameAlias(colName, tableAlias);
|
|
132
|
+
if(col.udt_name === "int8"){
|
|
133
|
+
return `to_timestamp(${escapedName}/1000.0)${tz}`
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
return `${escapedName}${tz}`;
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
const JSON_Funcs: FunctionSpec[] = [
|
|
140
|
+
{
|
|
141
|
+
name: "$jsonb_set",
|
|
142
|
+
description: "[columnName: string, path: (string | number)[], new_value?: any, create_missing?: boolean ] Returns target value (columnName) with the section designated by path replaced by new_value, or with new_value added if create_missing is true (default is true) and the item designated by path does not exist",
|
|
143
|
+
singleColArg: false,
|
|
144
|
+
numArgs: 4,
|
|
145
|
+
type: "function",
|
|
146
|
+
getFields: ([column]) => column,
|
|
147
|
+
getQuery: ({
|
|
148
|
+
args: [colName, path = [], new_value, create_missing = true],
|
|
149
|
+
tableAlias, allowedFields
|
|
150
|
+
}) => {
|
|
151
|
+
if(!allowedFields.includes(colName)) {
|
|
152
|
+
throw `Unexpected: column ${colName} not found`;
|
|
153
|
+
}
|
|
154
|
+
if(!path || !Array.isArray(path) || !path.every(v => ["number", "string"].includes(typeof v))){
|
|
155
|
+
throw "Expecting: [columnName: string, path: (string | number)[], new_value?: any, create_missing?: boolean ]"
|
|
156
|
+
}
|
|
157
|
+
const escapedName = asNameAlias(colName, tableAlias);
|
|
158
|
+
|
|
159
|
+
return `jsonb_set(${escapedName}, ${asValue(path)}, ${asValue(new_value)}, ${create_missing})`;
|
|
160
|
+
}
|
|
161
|
+
},
|
|
162
|
+
|
|
163
|
+
{
|
|
164
|
+
name: "$jsonb_path_query",
|
|
165
|
+
description: "[columnName: string, jsonPath: string, vars?: object, silent?: boolean]\n Returns all JSON items returned by the JSON path for the specified JSON value. The optional vars and silent arguments act the same as for jsonb_path_exists.",
|
|
166
|
+
singleColArg: false,
|
|
167
|
+
numArgs: 4,
|
|
168
|
+
type: "function",
|
|
169
|
+
getFields: ([column]) => column,
|
|
170
|
+
getQuery: ({
|
|
171
|
+
args: [colName, jsonPath, ...otherArgs],
|
|
172
|
+
tableAlias, allowedFields
|
|
173
|
+
}) => {
|
|
174
|
+
if(!allowedFields.includes(colName)) {
|
|
175
|
+
throw `Unexpected: column ${colName} not found`;
|
|
176
|
+
}
|
|
177
|
+
if(!jsonPath || typeof jsonPath !== "string"){
|
|
178
|
+
throw "Expecting: [columnName: string, jsonPath: string, vars?: object, silent?: boolean]"
|
|
179
|
+
}
|
|
180
|
+
const escapedName = asNameAlias(colName, tableAlias);
|
|
181
|
+
|
|
182
|
+
return `jsonb_path_query(${escapedName}, ${[jsonPath, ...otherArgs].map(v => asValue(v)).join(", ")})`;
|
|
183
|
+
}
|
|
184
|
+
},
|
|
185
|
+
|
|
186
|
+
...([
|
|
187
|
+
["jsonb_array_length", "Returns the number of elements in the outermost JSON array"],
|
|
188
|
+
["jsonb_each", "Expands the outermost JSON object into a set of key/value pairs"],
|
|
189
|
+
["jsonb_each_text", "Expands the outermost JSON object into a set of key/value pairs. The returned values will be of type text"],
|
|
190
|
+
["jsonb_object_keys", "Returns set of keys in the outermost JSON object"],
|
|
191
|
+
["jsonb_strip_nulls", "Returns from_json with all object fields that have null values omitted. Other null values are untouched"],
|
|
192
|
+
["jsonb_pretty", "Returns from_json as indented JSON text "],
|
|
193
|
+
["jsonb_to_record", "Builds an arbitrary record from a JSON object"],
|
|
194
|
+
["jsonb_array_elements", "Expands a JSON array to a set of JSON values"],
|
|
195
|
+
["jsonb_array_elements_text", "Expands a JSON array to a set of text values "],
|
|
196
|
+
["jsonb_typeof", "Returns the type of the outermost JSON value as a text string. Possible types are object, array, string, number, boolean, and null "],
|
|
197
|
+
] as const).map(([ name, description]) => ({
|
|
198
|
+
name: "$" + name,
|
|
199
|
+
description,
|
|
200
|
+
singleColArg: true,
|
|
201
|
+
numArgs: 1,
|
|
202
|
+
type: "function",
|
|
203
|
+
getFields: ([col]) => col,
|
|
204
|
+
getQuery: ({ args: [colName], tableAlias }) => {
|
|
205
|
+
const escapedName = asNameAlias(colName, tableAlias);
|
|
206
|
+
return `${name}(${escapedName})`;
|
|
207
|
+
}
|
|
208
|
+
} as FunctionSpec))
|
|
209
|
+
];
|
|
210
|
+
|
|
211
|
+
const FTS_Funcs: FunctionSpec[] =
|
|
212
|
+
/* Full text search
|
|
213
|
+
https://www.postgresql.org/docs/current/textsearch-dictionaries.html#TEXTSEARCH-SIMPLE-DICTIONARY
|
|
214
|
+
*/
|
|
215
|
+
[
|
|
216
|
+
"simple", // • convert the input token to lower case • exclude stop words
|
|
217
|
+
// "synonym", // replace word with a synonym
|
|
218
|
+
"english",
|
|
219
|
+
// "english_stem",
|
|
220
|
+
// "english_hunspell",
|
|
221
|
+
""
|
|
222
|
+
].map(type => ({
|
|
223
|
+
name: "$ts_headline" + (type? ("_" + type) : ""),
|
|
224
|
+
description: ` :[column_name <string>, search_term: <string | { to_tsquery: string } > ] -> sha512 hash of the of column content`,
|
|
225
|
+
type: "function" as const,
|
|
226
|
+
singleColArg: true,
|
|
227
|
+
numArgs: 2,
|
|
228
|
+
getFields: ([column]) => [column],
|
|
229
|
+
getQuery: ({ args }) => {
|
|
230
|
+
const col = asName(args[0]);
|
|
231
|
+
let qVal = args[1], qType = "to_tsquery";
|
|
232
|
+
const _type = type? (asValue(type) + ",") : "";
|
|
233
|
+
|
|
234
|
+
const searchTypes = TextFilter_FullTextSearchFilterKeys;
|
|
235
|
+
|
|
236
|
+
/* { to_tsquery: 'search term' } */
|
|
237
|
+
if(isPlainObject(qVal)){
|
|
238
|
+
const keys = Object.keys(qVal);
|
|
239
|
+
if(!keys.length) throw "Bad arg";
|
|
240
|
+
if(keys.length !==1 || !searchTypes.includes(keys[0] as any)) throw "Expecting a an object with a single key named one of: " + searchTypes.join(", ");
|
|
241
|
+
qType = keys[0]!;
|
|
242
|
+
qVal = asValue(qVal[qType]);
|
|
243
|
+
|
|
244
|
+
/* 'search term' */
|
|
245
|
+
} else if(typeof qVal === "string") {
|
|
246
|
+
qVal = pgp.as.format(qType + "($1)", [qVal])
|
|
247
|
+
} else throw "Bad second arg. Exepcting search string or { to_tsquery: 'search string' }";
|
|
248
|
+
|
|
249
|
+
const res = `ts_headline(${_type} ${col}::text, ${qVal}, 'ShortWord=1 ' )`
|
|
250
|
+
// console.log(res)
|
|
251
|
+
|
|
252
|
+
return res
|
|
253
|
+
}
|
|
254
|
+
}));
|
|
255
|
+
|
|
256
|
+
let PostGIS_Funcs: FunctionSpec[] = ([
|
|
257
|
+
{
|
|
258
|
+
fname: "ST_DWithin",
|
|
259
|
+
description: `:[column_name, { lat?: number; lng?: number; geojson?: object; srid?: number; use_spheroid?: boolean; distance: number; }]
|
|
260
|
+
-> Returns true if the geometries are within a given distance
|
|
261
|
+
For geometry: The distance is specified in units defined by the spatial reference system of the geometries. For this function to make sense, the source geometries must be in the same coordinate system (have the same SRID).
|
|
262
|
+
For geography: units are in meters and distance measurement defaults to use_spheroid=true. For faster evaluation use use_spheroid=false to measure on the sphere.
|
|
263
|
+
`
|
|
264
|
+
},
|
|
265
|
+
{
|
|
266
|
+
fname: "<->",
|
|
267
|
+
description: `:[column_name, { lat?: number; lng?: number; geojson?: object; srid?: number; use_spheroid?: boolean }]
|
|
268
|
+
-> The <-> operator returns the 2D distance between two geometries. Used in the "ORDER BY" clause provides index-assisted nearest-neighbor result sets. For PostgreSQL below 9.5 only gives centroid distance of bounding boxes and for PostgreSQL 9.5+, does true KNN distance search giving true distance between geometries, and distance sphere for geographies.`
|
|
269
|
+
},
|
|
270
|
+
{
|
|
271
|
+
fname: "ST_Distance",
|
|
272
|
+
description: ` :[column_name, { lat?: number; lng?: number; geojson?: object; srid?: number; use_spheroid?: boolean }]
|
|
273
|
+
-> For geometry types returns the minimum 2D Cartesian (planar) distance between two geometries, in projected units (spatial ref units).
|
|
274
|
+
-> For geography types defaults to return the minimum geodesic distance between two geographies in meters, compute on the spheroid determined by the SRID. If use_spheroid is false, a faster spherical calculation is used.
|
|
275
|
+
`,
|
|
276
|
+
},{
|
|
277
|
+
fname: "ST_DistanceSpheroid",
|
|
278
|
+
description: ` :[column_name, { lat?: number; lng?: number; geojson?: object; srid?: number; spheroid?: string; }] -> Returns minimum distance in meters between two lon/lat geometries given a particular spheroid. See the explanation of spheroids given for ST_LengthSpheroid.
|
|
279
|
+
|
|
280
|
+
`,
|
|
281
|
+
},{
|
|
282
|
+
fname: "ST_DistanceSphere",
|
|
283
|
+
description: ` :[column_name, { lat?: number; lng?: number; geojson?: object; srid?: number }] -> Returns linear distance in meters between two lon/lat points. Uses a spherical earth and radius of 6370986 meters. Faster than ST_DistanceSpheroid, but less accurate. Only implemented for points.`,
|
|
284
|
+
}
|
|
285
|
+
] as const).map(({ fname, description }) => ({
|
|
286
|
+
name: "$" + fname,
|
|
287
|
+
description,
|
|
288
|
+
type: "function" as const,
|
|
289
|
+
singleColArg: true,
|
|
290
|
+
numArgs: 1,
|
|
291
|
+
canBeUsedForFilter: fname === "ST_DWithin",
|
|
292
|
+
getFields: (args: any[]) => [args[0]],
|
|
293
|
+
getQuery: ({ allColumns, args: [columnName, arg2], tableAlias }) => {
|
|
294
|
+
const mErr = () => { throw `${fname}: Expecting a second argument like: { lat?: number; lng?: number; geojson?: object; srid?: number; use_spheroid?: boolean }` };
|
|
295
|
+
|
|
296
|
+
if(!isObject(arg2)) {
|
|
297
|
+
mErr();
|
|
298
|
+
}
|
|
299
|
+
const col = allColumns.find(c => c.name === columnName);
|
|
300
|
+
if(!col) {
|
|
301
|
+
throw new Error("Col not found: " + columnName)
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
const {
|
|
305
|
+
lat, lng, srid = 4326,
|
|
306
|
+
geojson, text, use_spheroid,
|
|
307
|
+
distance, spheroid = 'SPHEROID["WGS 84", 6378137, 298.257223563]',
|
|
308
|
+
unit,
|
|
309
|
+
debug
|
|
310
|
+
} = arg2;
|
|
311
|
+
let geomQ = "", extraParams = "";
|
|
312
|
+
|
|
313
|
+
if(typeof text === "string"){
|
|
314
|
+
geomQ = `ST_GeomFromText(${asValue(text)})`;
|
|
315
|
+
} else if([lat, lng].every(v => Number.isFinite(v))){
|
|
316
|
+
geomQ = `ST_Point(${asValue(lng)}, ${asValue(lat)})`;
|
|
317
|
+
} else if(isPlainObject(geojson)){
|
|
318
|
+
geomQ = `ST_GeomFromGeoJSON(${geojson})`;
|
|
319
|
+
} else mErr();
|
|
320
|
+
|
|
321
|
+
if(Number.isFinite(srid)){
|
|
322
|
+
geomQ = `ST_SetSRID(${geomQ}, ${asValue(srid)})`;
|
|
323
|
+
}
|
|
324
|
+
|
|
325
|
+
let colCast = "";
|
|
326
|
+
const colIsGeog = col.udt_name === "geography";
|
|
327
|
+
let geomQCast = colIsGeog? "::geography" : "::geometry";
|
|
328
|
+
|
|
329
|
+
/**
|
|
330
|
+
* float ST_Distance(geometry g1, geometry g2);
|
|
331
|
+
* float ST_Distance(geography geog1, geography geog2, boolean use_spheroid=true);
|
|
332
|
+
*/
|
|
333
|
+
if(fname === "ST_Distance"){
|
|
334
|
+
|
|
335
|
+
if(typeof use_spheroid === "boolean"){
|
|
336
|
+
extraParams = ", " + asValue(use_spheroid);
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
colCast = (colIsGeog || use_spheroid)? "::geography" : "::geometry";
|
|
340
|
+
geomQCast = (colIsGeog || use_spheroid)? "::geography" : "::geometry";
|
|
341
|
+
|
|
342
|
+
/**
|
|
343
|
+
* boolean ST_DWithin(geometry g1, geometry g2, double precision distance_of_srid);
|
|
344
|
+
* boolean ST_DWithin(geography gg1, geography gg2, double precision distance_meters, boolean use_spheroid = true);
|
|
345
|
+
*/
|
|
346
|
+
} else if(fname === "ST_DWithin"){
|
|
347
|
+
colCast = colIsGeog? "::geography" : "::geometry";
|
|
348
|
+
geomQCast = colIsGeog? "::geography" : "::geometry";
|
|
349
|
+
|
|
350
|
+
if(typeof distance !== "number") {
|
|
351
|
+
throw `ST_DWithin: distance param missing or not a number`;
|
|
352
|
+
}
|
|
353
|
+
const allowedUnits = ["m", "km"];
|
|
354
|
+
if(unit && !allowedUnits.includes(unit)){
|
|
355
|
+
throw `ST_DWithin: unit can only be one of: ${allowedUnits}`;
|
|
356
|
+
}
|
|
357
|
+
extraParams = ", " + asValue(distance * (unit === "km"? 1000 : 1));
|
|
358
|
+
|
|
359
|
+
|
|
360
|
+
/**
|
|
361
|
+
* float ST_DistanceSpheroid(geometry geomlonlatA, geometry geomlonlatB, spheroid measurement_spheroid);
|
|
362
|
+
*/
|
|
363
|
+
} else if(fname === "ST_DistanceSpheroid"){
|
|
364
|
+
colCast = "::geometry";
|
|
365
|
+
geomQCast = "::geometry";
|
|
366
|
+
if(typeof spheroid !== "string") throw `ST_DistanceSpheroid: spheroid param must be string`;
|
|
367
|
+
extraParams = `, ${asValue(spheroid)}`
|
|
368
|
+
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
/**
|
|
372
|
+
* float ST_DistanceSphere(geometry geomlonlatA, geometry geomlonlatB);
|
|
373
|
+
*/
|
|
374
|
+
} else if(fname === "ST_DistanceSphere"){
|
|
375
|
+
colCast = "::geometry";
|
|
376
|
+
geomQCast = "::geometry";
|
|
377
|
+
extraParams = "";
|
|
378
|
+
|
|
379
|
+
/**
|
|
380
|
+
* double precision <->( geometry A , geometry B );
|
|
381
|
+
* double precision <->( geography A , geography B );
|
|
382
|
+
*/
|
|
383
|
+
} else if(fname === "<->"){
|
|
384
|
+
colCast = colIsGeog? "::geography" : "::geometry";
|
|
385
|
+
geomQCast = colIsGeog? "::geography" : "::geometry";
|
|
386
|
+
const q = pgp.as.format(`${asNameAlias(columnName, tableAlias)}${colCast} <-> ${geomQ}${geomQCast}`);
|
|
387
|
+
if(debug) throw q;
|
|
388
|
+
return q;
|
|
389
|
+
}
|
|
390
|
+
|
|
391
|
+
const query = pgp.as.format(`${fname}(${asNameAlias(columnName, tableAlias)}${colCast} , ${geomQ}${geomQCast} ${extraParams})`);
|
|
392
|
+
if(debug) {
|
|
393
|
+
throw query;
|
|
394
|
+
}
|
|
395
|
+
return query;
|
|
396
|
+
}
|
|
397
|
+
}));
|
|
398
|
+
|
|
399
|
+
PostGIS_Funcs = PostGIS_Funcs.concat(
|
|
400
|
+
[
|
|
401
|
+
"ST_AsText", "ST_AsEWKT", "ST_AsEWKB", "ST_AsBinary", "ST_AsMVT", "ST_AsMVTGeom",
|
|
402
|
+
"ST_AsGeoJSON", "ST_Simplify",
|
|
403
|
+
"ST_SnapToGrid", "ST_Centroid",
|
|
404
|
+
"st_aslatlontext",
|
|
405
|
+
]
|
|
406
|
+
.map(fname => {
|
|
407
|
+
const res: FunctionSpec = {
|
|
408
|
+
name: "$" + fname,
|
|
409
|
+
description: ` :[column_name, precision?] -> json GeoJSON output of a geometry column`,
|
|
410
|
+
type: "function",
|
|
411
|
+
singleColArg: true,
|
|
412
|
+
numArgs: 1,
|
|
413
|
+
getFields: (args: any[]) => [args[0]],
|
|
414
|
+
getQuery: ({ args: [colName, ...otherArgs], tableAlias }) => {
|
|
415
|
+
let secondArg = "";
|
|
416
|
+
if(otherArgs.length) secondArg = ", " + otherArgs.map(arg => asValue(arg)).join(", ");
|
|
417
|
+
const escTabelName = asNameAlias(colName, tableAlias) + "::geometry";
|
|
418
|
+
const result = pgp.as.format(fname + "(" + escTabelName + secondArg + ( fname === "ST_AsGeoJSON"? ")::jsonb" : ")" ));
|
|
419
|
+
if(["ST_Centroid", "ST_SnapToGrid", "ST_Simplify"].includes(fname)){
|
|
420
|
+
const r = `ST_AsGeoJSON(${result})::jsonb`;
|
|
421
|
+
return r;
|
|
422
|
+
}
|
|
423
|
+
return result;
|
|
424
|
+
}
|
|
425
|
+
}
|
|
426
|
+
return res;
|
|
427
|
+
}),
|
|
428
|
+
);
|
|
429
|
+
|
|
430
|
+
|
|
431
|
+
PostGIS_Funcs = PostGIS_Funcs.concat(
|
|
432
|
+
["ST_Extent", "ST_3DExtent", "ST_XMin_Agg", "ST_XMax_Agg", "ST_YMin_Agg", "ST_YMax_Agg", "ST_ZMin_Agg", "ST_ZMax_Agg"]
|
|
433
|
+
.map(fname => {
|
|
434
|
+
const res: FunctionSpec = {
|
|
435
|
+
name: "$" + fname,
|
|
436
|
+
description: ` :[column_name] -> ST_Extent returns a bounding box that encloses a set of geometries.
|
|
437
|
+
The ST_Extent function is an "aggregate" function in the terminology of SQL.
|
|
438
|
+
That means that it operates on lists of data, in the same way the SUM() and AVG() functions do.`,
|
|
439
|
+
type: "aggregation",
|
|
440
|
+
singleColArg: true,
|
|
441
|
+
numArgs: 1,
|
|
442
|
+
getFields: (args: any[]) => [args[0]],
|
|
443
|
+
getQuery: ({ args, tableAlias }) => {
|
|
444
|
+
const escTabelName = asNameAlias(args[0], tableAlias) + "::geometry";
|
|
445
|
+
if(fname.includes("Extent")){
|
|
446
|
+
return `${fname}(${escTabelName})`;
|
|
447
|
+
}
|
|
448
|
+
return `${fname.endsWith("_Agg")? fname.slice(0, -4) : fname}(ST_Collect(${escTabelName}))`;
|
|
449
|
+
}
|
|
450
|
+
}
|
|
451
|
+
return res;
|
|
452
|
+
}),
|
|
453
|
+
);
|
|
454
|
+
|
|
455
|
+
PostGIS_Funcs = PostGIS_Funcs.concat(
|
|
456
|
+
["ST_Length", "ST_X", "ST_Y", "ST_Z"].map(fname => ({
|
|
457
|
+
name: "$" + fname,
|
|
458
|
+
type: "function",
|
|
459
|
+
singleColArg: true,
|
|
460
|
+
numArgs: 1,
|
|
461
|
+
getFields: (args: any[]) => [args[0]],
|
|
462
|
+
getQuery: ({ allColumns, args, tableAlias }) => {
|
|
463
|
+
const colName = args[0];
|
|
464
|
+
const escapedColName = asNameAlias(colName, tableAlias);
|
|
465
|
+
const col = allColumns.find(c => c.name === colName);
|
|
466
|
+
if(!col) throw new Error("Col not found: " + colName)
|
|
467
|
+
|
|
468
|
+
return `${fname}(${escapedColName})`;
|
|
469
|
+
}
|
|
470
|
+
}))
|
|
471
|
+
);
|
|
472
|
+
|
|
473
|
+
/**
|
|
474
|
+
* Each function expects a column at the very least
|
|
475
|
+
*/
|
|
476
|
+
export const FUNCTIONS: FunctionSpec[] = [
|
|
477
|
+
|
|
478
|
+
// Hashing
|
|
479
|
+
{
|
|
480
|
+
name: "$md5_multi",
|
|
481
|
+
description: ` :[...column_names] -> md5 hash of the column content`,
|
|
482
|
+
type: "function",
|
|
483
|
+
singleColArg: false,
|
|
484
|
+
numArgs: MAX_COL_NUM,
|
|
485
|
+
getFields: (args: any[]) => args,
|
|
486
|
+
getQuery: ({ args, tableAlias }) => {
|
|
487
|
+
const q = pgp.as.format("md5(" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + "::text, '' )" ).join(" || ") + ")");
|
|
488
|
+
return q
|
|
489
|
+
}
|
|
490
|
+
},
|
|
491
|
+
{
|
|
492
|
+
name: "$md5_multi_agg",
|
|
493
|
+
description: ` :[...column_names] -> md5 hash of the string aggregation of column content`,
|
|
494
|
+
type: "aggregation",
|
|
495
|
+
singleColArg: false,
|
|
496
|
+
numArgs: MAX_COL_NUM,
|
|
497
|
+
getFields: (args: any[]) => args,
|
|
498
|
+
getQuery: ({ args, tableAlias }) => {
|
|
499
|
+
const q = pgp.as.format("md5(string_agg(" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + "::text, '' )" ).join(" || ") + ", ','))");
|
|
500
|
+
return q
|
|
501
|
+
}
|
|
502
|
+
},
|
|
503
|
+
|
|
504
|
+
{
|
|
505
|
+
name: "$sha256_multi",
|
|
506
|
+
description: ` :[...column_names] -> sha256 hash of the of column content`,
|
|
507
|
+
type: "function",
|
|
508
|
+
singleColArg: false,
|
|
509
|
+
numArgs: MAX_COL_NUM,
|
|
510
|
+
getFields: (args: any[]) => args,
|
|
511
|
+
getQuery: ({ args, tableAlias }) => {
|
|
512
|
+
const q = pgp.as.format("encode(sha256((" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )" ).join(" || ") + ")::text::bytea), 'hex')");
|
|
513
|
+
return q
|
|
514
|
+
}
|
|
515
|
+
},
|
|
516
|
+
{
|
|
517
|
+
name: "$sha256_multi_agg",
|
|
518
|
+
description: ` :[...column_names] -> sha256 hash of the string aggregation of column content`,
|
|
519
|
+
type: "aggregation",
|
|
520
|
+
singleColArg: false,
|
|
521
|
+
numArgs: MAX_COL_NUM,
|
|
522
|
+
getFields: (args: any[]) => args,
|
|
523
|
+
getQuery: ({ args, tableAlias }) => {
|
|
524
|
+
const q = pgp.as.format("encode(sha256(string_agg(" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )" ).join(" || ") + ", ',')::text::bytea), 'hex')");
|
|
525
|
+
return q
|
|
526
|
+
}
|
|
527
|
+
},
|
|
528
|
+
{
|
|
529
|
+
name: "$sha512_multi",
|
|
530
|
+
description: ` :[...column_names] -> sha512 hash of the of column content`,
|
|
531
|
+
type: "function",
|
|
532
|
+
singleColArg: false,
|
|
533
|
+
numArgs: MAX_COL_NUM,
|
|
534
|
+
getFields: (args: any[]) => args,
|
|
535
|
+
getQuery: ({ args, tableAlias }) => {
|
|
536
|
+
const q = pgp.as.format("encode(sha512((" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )" ).join(" || ") + ")::text::bytea), 'hex')");
|
|
537
|
+
return q
|
|
538
|
+
}
|
|
539
|
+
},
|
|
540
|
+
{
|
|
541
|
+
name: "$sha512_multi_agg",
|
|
542
|
+
description: ` :[...column_names] -> sha512 hash of the string aggregation of column content`,
|
|
543
|
+
type: "aggregation",
|
|
544
|
+
singleColArg: false,
|
|
545
|
+
numArgs: MAX_COL_NUM,
|
|
546
|
+
getFields: (args: any[]) => args,
|
|
547
|
+
getQuery: ({ args, tableAlias }) => {
|
|
548
|
+
const q = pgp.as.format("encode(sha512(string_agg(" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )" ).join(" || ") + ", ',')::text::bytea), 'hex')");
|
|
549
|
+
return q
|
|
550
|
+
}
|
|
551
|
+
},
|
|
552
|
+
|
|
553
|
+
...FTS_Funcs,
|
|
554
|
+
|
|
555
|
+
...JSON_Funcs,
|
|
556
|
+
|
|
557
|
+
...PostGIS_Funcs,
|
|
558
|
+
|
|
559
|
+
{
|
|
560
|
+
name: "$left",
|
|
561
|
+
description: ` :[column_name, number] -> substring`,
|
|
562
|
+
type: "function",
|
|
563
|
+
numArgs: 2,
|
|
564
|
+
singleColArg: false,
|
|
565
|
+
getFields: (args: any[]) => [args[0]],
|
|
566
|
+
getQuery: ({ allowedFields, args, tableAlias }) => {
|
|
567
|
+
return pgp.as.format("LEFT(" + asNameAlias(args[0], tableAlias) + ", $1)", [args[1]]);
|
|
568
|
+
}
|
|
569
|
+
},
|
|
570
|
+
{
|
|
571
|
+
name: "$unnest_words",
|
|
572
|
+
description: ` :[column_name] -> Splits string at spaces`,
|
|
573
|
+
type: "function",
|
|
574
|
+
numArgs: 1,
|
|
575
|
+
singleColArg: true,
|
|
576
|
+
getFields: (args: any[]) => [args[0]],
|
|
577
|
+
getQuery: ({ allowedFields, args, tableAlias }) => {
|
|
578
|
+
return pgp.as.format("unnest(string_to_array(" + asNameAlias(args[0], tableAlias) + "::TEXT , ' '))");//, [args[1]]
|
|
579
|
+
}
|
|
580
|
+
},
|
|
581
|
+
{
|
|
582
|
+
name: "$right",
|
|
583
|
+
description: ` :[column_name, number] -> substring`,
|
|
584
|
+
type: "function",
|
|
585
|
+
numArgs: 2,
|
|
586
|
+
singleColArg: false,
|
|
587
|
+
getFields: (args: any[]) => [args[0]],
|
|
588
|
+
getQuery: ({ allowedFields, args, tableAlias }) => {
|
|
589
|
+
return pgp.as.format("RIGHT(" + asNameAlias(args[0], tableAlias) + ", $1)", [args[1]]);
|
|
590
|
+
}
|
|
591
|
+
},
|
|
592
|
+
|
|
593
|
+
{
|
|
594
|
+
name: "$to_char",
|
|
595
|
+
type: "function",
|
|
596
|
+
description: ` :[column_name, format<string>] -> format dates and strings. Eg: [current_timestamp, 'HH12:MI:SS']`,
|
|
597
|
+
singleColArg: false,
|
|
598
|
+
numArgs: 2,
|
|
599
|
+
getFields: (args: any[]) => [args[0]],
|
|
600
|
+
getQuery: ({ allowedFields, args, tableAlias }) => {
|
|
601
|
+
if(args.length === 3){
|
|
602
|
+
return pgp.as.format("to_char(" + asNameAlias(args[0], tableAlias) + ", $2, $3)", [args[0], args[1], args[2]]);
|
|
603
|
+
}
|
|
604
|
+
return pgp.as.format("to_char(" + asNameAlias(args[0], tableAlias) + ", $2)", [args[0], args[1]]);
|
|
605
|
+
}
|
|
606
|
+
},
|
|
607
|
+
|
|
608
|
+
/**
|
|
609
|
+
* Date trunc utils
|
|
610
|
+
*/
|
|
611
|
+
...[
|
|
612
|
+
"microsecond",
|
|
613
|
+
"millisecond",
|
|
614
|
+
"second",
|
|
615
|
+
"minute",
|
|
616
|
+
"hour",
|
|
617
|
+
"day",
|
|
618
|
+
"week",
|
|
619
|
+
"month",
|
|
620
|
+
"quarter",
|
|
621
|
+
"year",
|
|
622
|
+
"decade",
|
|
623
|
+
"century",
|
|
624
|
+
"millennium"
|
|
625
|
+
].map(k => ({ val: 0, unit: k }))
|
|
626
|
+
.concat([
|
|
627
|
+
{ val: 6, unit: 'month' },
|
|
628
|
+
{ val: 4, unit: 'month' },
|
|
629
|
+
{ val: 2, unit: 'month' },
|
|
630
|
+
{ val: 8, unit: 'hour' },
|
|
631
|
+
{ val: 4, unit: 'hour' },
|
|
632
|
+
{ val: 2, unit: 'hour' },
|
|
633
|
+
{ val: 30, unit: 'minute' },
|
|
634
|
+
{ val: 15, unit: 'minute' },
|
|
635
|
+
{ val: 6, unit: 'minute' },
|
|
636
|
+
{ val: 5, unit: 'minute' },
|
|
637
|
+
{ val: 4, unit: 'minute' },
|
|
638
|
+
{ val: 3, unit: 'minute' },
|
|
639
|
+
{ val: 2, unit: 'minute' },
|
|
640
|
+
{ val: 30, unit: 'second' },
|
|
641
|
+
{ val: 15, unit: 'second' },
|
|
642
|
+
{ val: 10, unit: 'second' },
|
|
643
|
+
{ val: 8, unit: 'second' },
|
|
644
|
+
{ val: 6, unit: 'second' },
|
|
645
|
+
{ val: 5, unit: 'second' },
|
|
646
|
+
{ val: 4, unit: 'second' },
|
|
647
|
+
{ val: 3, unit: 'second' },
|
|
648
|
+
{ val: 2, unit: 'second' },
|
|
649
|
+
|
|
650
|
+
{ val: 500, unit: 'millisecond' },
|
|
651
|
+
{ val: 250, unit: 'millisecond' },
|
|
652
|
+
{ val: 100, unit: 'millisecond' },
|
|
653
|
+
{ val: 50, unit: 'millisecond' },
|
|
654
|
+
{ val: 25, unit: 'millisecond' },
|
|
655
|
+
{ val: 10, unit: 'millisecond' },
|
|
656
|
+
{ val: 5, unit: 'millisecond' },
|
|
657
|
+
{ val: 2, unit: 'millisecond' },
|
|
658
|
+
]).map(({ val, unit }) => ({
|
|
659
|
+
name: "$date_trunc_" + (val || "") + unit,
|
|
660
|
+
type: "function",
|
|
661
|
+
description: ` :[column_name, opts?: { timeZone: true | 'TZ Name' }] -> round down timestamp to closest ${val || ""} ${unit} `,
|
|
662
|
+
singleColArg: true,
|
|
663
|
+
numArgs: 2,
|
|
664
|
+
getFields: (args: any[]) => [args[0]],
|
|
665
|
+
getQuery: ({ allColumns, args, tableAlias }) => {
|
|
666
|
+
/** Timestamp added to ensure filters work correctly (psql will loose the string value timezone when comparing to a non tz column) */
|
|
667
|
+
const col = parseUnix(args[0], tableAlias, allColumns, args[1]);
|
|
668
|
+
if(!val) return `date_trunc(${asValue(unit)}, ${col})`;
|
|
669
|
+
const PreviousUnit = {
|
|
670
|
+
year: "decade",
|
|
671
|
+
month: "year",
|
|
672
|
+
hour: "day",
|
|
673
|
+
minute: "hour",
|
|
674
|
+
second: "minute",
|
|
675
|
+
millisecond: "second",
|
|
676
|
+
microsecond: "millisecond",
|
|
677
|
+
};
|
|
678
|
+
|
|
679
|
+
const prevUnit = PreviousUnit[unit as "month"];
|
|
680
|
+
if(!prevUnit){
|
|
681
|
+
throw "Not supported. prevUnit not found";
|
|
682
|
+
}
|
|
683
|
+
|
|
684
|
+
let extractedUnit = `date_part(${asValue(unit, "::text")}, ${col})::int`;
|
|
685
|
+
if(unit === "microsecond" || unit === "millisecond"){
|
|
686
|
+
extractedUnit = `(${extractedUnit} - 1000 * floor(${extractedUnit}/1000)::int)`
|
|
687
|
+
}
|
|
688
|
+
const res = `(date_trunc(${asValue(prevUnit)}, ${col}) + floor(${extractedUnit} / ${val}) * interval ${asValue(val + " " + unit)})`;
|
|
689
|
+
// console.log(res);
|
|
690
|
+
return res;
|
|
691
|
+
}
|
|
692
|
+
} as FunctionSpec)),
|
|
693
|
+
|
|
694
|
+
/* Date funcs date_part */
|
|
695
|
+
...["date_trunc", "date_part"].map(funcName => ({
|
|
696
|
+
name: "$" + funcName,
|
|
697
|
+
type: "function",
|
|
698
|
+
numArgs: 3,
|
|
699
|
+
description: ` :[unit<string>, column_name, opts?: { timeZone: true | string }] -> ` + (funcName === "date_trunc"? ` round down timestamp to closest unit value. ` : ` extract date unit as float8. ` ) + ` E.g. ['hour', col] `,
|
|
700
|
+
singleColArg: false,
|
|
701
|
+
getFields: (args: any[]) => [args[1]],
|
|
702
|
+
getQuery: ({ allColumns, args, tableAlias }) => {
|
|
703
|
+
return `${funcName}(${asValue(args[0])}, ${parseUnix(args[1], tableAlias, allColumns, args[2])})`;
|
|
704
|
+
}
|
|
705
|
+
} as FunctionSpec)),
|
|
706
|
+
|
|
707
|
+
/* Handy date funcs */
|
|
708
|
+
...[
|
|
709
|
+
["date", "YYYY-MM-DD"],
|
|
710
|
+
["datetime", "YYYY-MM-DD HH24:MI"],
|
|
711
|
+
["datetime_", "YYYY_MM_DD__HH24_MI"],
|
|
712
|
+
["timedate", "HH24:MI YYYY-MM-DD"],
|
|
713
|
+
|
|
714
|
+
["time", "HH24:MI"],
|
|
715
|
+
["time12", "HH:MI"],
|
|
716
|
+
["timeAM", "HH:MI AM"],
|
|
717
|
+
|
|
718
|
+
["dy", "dy"],
|
|
719
|
+
["Dy", "Dy"],
|
|
720
|
+
["day", "day"],
|
|
721
|
+
["Day", "Day"],
|
|
722
|
+
|
|
723
|
+
["DayNo", "DD"],
|
|
724
|
+
["DD", "DD"],
|
|
725
|
+
|
|
726
|
+
["dowUS", "D"],
|
|
727
|
+
["D", "D"],
|
|
728
|
+
["dow", "ID"],
|
|
729
|
+
["ID", "ID"],
|
|
730
|
+
|
|
731
|
+
["MonthNo", "MM"],
|
|
732
|
+
["MM", "MM"],
|
|
733
|
+
|
|
734
|
+
["mon", "mon"],
|
|
735
|
+
["Mon", "Mon"],
|
|
736
|
+
["month", "month"],
|
|
737
|
+
["Month", "Month"],
|
|
738
|
+
|
|
739
|
+
["year", "yyyy"],
|
|
740
|
+
["yyyy", "yyyy"],
|
|
741
|
+
["yy", "yy"],
|
|
742
|
+
["yr", "yy"],
|
|
743
|
+
].map(([funcName, txt]) => ({
|
|
744
|
+
name: "$" + funcName,
|
|
745
|
+
type: "function",
|
|
746
|
+
description: ` :[column_name, opts?: { timeZone: true | string }] -> get timestamp formated as ` + txt,
|
|
747
|
+
singleColArg: true,
|
|
748
|
+
numArgs: 1,
|
|
749
|
+
getFields: (args: any[]) => [args[0]],
|
|
750
|
+
getQuery: ({ allColumns, args, tableAlias }) => {
|
|
751
|
+
return pgp.as.format("trim(to_char(" + parseUnix(args[0], tableAlias, allColumns, args[1]) + ", $2))", [args[0], txt]);
|
|
752
|
+
}
|
|
753
|
+
} as FunctionSpec)),
|
|
754
|
+
|
|
755
|
+
/* Basic 1 arg col funcs */
|
|
756
|
+
...[
|
|
757
|
+
...["TEXT"].flatMap(cast => [
|
|
758
|
+
"upper", "lower", "length", "reverse", "trim", "initcap"
|
|
759
|
+
].map(funcName => ({ cast, funcName }))),
|
|
760
|
+
...[""].flatMap(cast => [
|
|
761
|
+
"round", "ceil", "floor", "sign", "md5"
|
|
762
|
+
].map(funcName => ({ cast, funcName }))),
|
|
763
|
+
].map(({ funcName, cast }) => ({
|
|
764
|
+
name: "$" + funcName,
|
|
765
|
+
type: "function",
|
|
766
|
+
numArgs: 1,
|
|
767
|
+
singleColArg: true,
|
|
768
|
+
getFields: (args: any[]) => [args[0]],
|
|
769
|
+
getQuery: ({ args, tableAlias }) => {
|
|
770
|
+
return `${funcName}(${asNameAlias(args[0], tableAlias)}${cast? `::${cast}`: ""})`;
|
|
771
|
+
}
|
|
772
|
+
} as FunctionSpec)),
|
|
773
|
+
|
|
774
|
+
/**
|
|
775
|
+
* Interval funcs
|
|
776
|
+
* (col1, col2?, trunc )
|
|
777
|
+
* */
|
|
778
|
+
...["age", "ageNow", "difference"].map(funcName => ({
|
|
779
|
+
name: "$" + funcName,
|
|
780
|
+
type: "function",
|
|
781
|
+
numArgs: 2,
|
|
782
|
+
singleColArg: true,
|
|
783
|
+
getFields: (args: any[]) => args.slice(0, 2).filter(a => typeof a === "string"), // Filtered because the second arg is optional
|
|
784
|
+
getQuery: ({ allowedFields, args, tableAlias, allColumns }) => {
|
|
785
|
+
const validColCount = args.slice(0, 2).filter(a => typeof a === "string").length;
|
|
786
|
+
const trunc = args[2];
|
|
787
|
+
const allowedTruncs = ["second", "minute", "hour", "day", "month", "year"];
|
|
788
|
+
if(trunc && !allowedTruncs.includes(trunc)) throw new Error("Incorrect trunc provided. Allowed values: " + allowedTruncs)
|
|
789
|
+
if(funcName === "difference" && validColCount !== 2) throw new Error("Must have two column names")
|
|
790
|
+
if(![1,2].includes(validColCount)) throw new Error("Must have one or two column names")
|
|
791
|
+
const [leftField, rightField] = args as [string, string];
|
|
792
|
+
const tzOpts = args[2];
|
|
793
|
+
const leftQ = parseUnix(leftField, tableAlias, allColumns, tzOpts);
|
|
794
|
+
let rightQ = rightField? parseUnix(rightField, tableAlias, allColumns, tzOpts) : "";
|
|
795
|
+
let query = "";
|
|
796
|
+
if(funcName === "ageNow" && validColCount === 1){
|
|
797
|
+
query = `age(now(), ${leftQ})`;
|
|
798
|
+
} else if(funcName === "age" || funcName === "ageNow"){
|
|
799
|
+
if(rightQ) rightQ = ", " + rightQ;
|
|
800
|
+
query = `age(${leftQ} ${rightQ})`;
|
|
801
|
+
} else {
|
|
802
|
+
query = `${leftQ} - ${rightQ}`;
|
|
803
|
+
}
|
|
804
|
+
return trunc? `date_trunc(${asValue(trunc)}, ${query})` : query;
|
|
805
|
+
}
|
|
806
|
+
} as FunctionSpec)),
|
|
807
|
+
|
|
808
|
+
/* pgcrypto funcs */
|
|
809
|
+
...["crypt"].map(funcName => ({
|
|
810
|
+
name: "$" + funcName,
|
|
811
|
+
type: "function",
|
|
812
|
+
numArgs: 1,
|
|
813
|
+
singleColArg: false,
|
|
814
|
+
getFields: (args: any[]) => [args[1]],
|
|
815
|
+
getQuery: ({ allowedFields, args, tableAlias }) => {
|
|
816
|
+
const value = asValue(args[0]) + "",
|
|
817
|
+
seedColumnName = asNameAlias(args[1], tableAlias);
|
|
818
|
+
|
|
819
|
+
return `crypt(${value}, ${seedColumnName}::text)`;
|
|
820
|
+
}
|
|
821
|
+
} as FunctionSpec)),
|
|
822
|
+
|
|
823
|
+
/* Text col and value funcs */
|
|
824
|
+
...["position", "position_lower"].map(funcName => ({
|
|
825
|
+
name: "$" + funcName,
|
|
826
|
+
type: "function",
|
|
827
|
+
numArgs: 1,
|
|
828
|
+
singleColArg: false,
|
|
829
|
+
getFields: (args: any[]) => [args[1]],
|
|
830
|
+
getQuery: ({ allowedFields, args, tableAlias }) => {
|
|
831
|
+
let a1 = asValue(args[0]),
|
|
832
|
+
a2 = asNameAlias(args[1], tableAlias);
|
|
833
|
+
if(funcName === "position_lower"){
|
|
834
|
+
a1 = `LOWER(${a1}::text)`;
|
|
835
|
+
a2 = `LOWER(${a2}::text)`;
|
|
836
|
+
}
|
|
837
|
+
return `position( ${a1} IN ${a2} )`;
|
|
838
|
+
}
|
|
839
|
+
} as FunctionSpec)),
|
|
840
|
+
...["template_string"].map(funcName => ({
|
|
841
|
+
name: "$" + funcName,
|
|
842
|
+
type: "function",
|
|
843
|
+
numArgs: 1,
|
|
844
|
+
minCols: 0,
|
|
845
|
+
singleColArg: false,
|
|
846
|
+
getFields: (args: any[]) => [] as string[], // Fields not validated because we'll use the allowed ones anyway
|
|
847
|
+
getQuery: ({ allowedFields, args, tableAlias }) => {
|
|
848
|
+
if(typeof args[0] !== "string") throw "First argument must be a string. E.g.: '{col1} ..text {col2} ...' ";
|
|
849
|
+
|
|
850
|
+
const rawValue = args[0];
|
|
851
|
+
let finalValue = rawValue;
|
|
852
|
+
const usedColumns = allowedFields.filter(fName => rawValue.includes(`{${fName}}`));
|
|
853
|
+
usedColumns.forEach((colName, idx) => {
|
|
854
|
+
finalValue = finalValue.split(`{${colName}}`).join(`%${idx + 1}$s`)
|
|
855
|
+
});
|
|
856
|
+
finalValue = asValue(finalValue);
|
|
857
|
+
|
|
858
|
+
if(usedColumns.length){
|
|
859
|
+
return `format(${finalValue}, ${usedColumns.map(c => `${asNameAlias(c, tableAlias)}::TEXT`).join(", ")})`;
|
|
860
|
+
}
|
|
861
|
+
|
|
862
|
+
return `format(${finalValue})`;
|
|
863
|
+
}
|
|
864
|
+
} as FunctionSpec)),
|
|
865
|
+
|
|
866
|
+
/** Custom highlight -> myterm => ['some text and', ['myterm'], ' and some other text']
|
|
867
|
+
* (fields: "*" | string[], term: string, { edgeTruncate: number = -1; noFields: boolean = false }) => string | (string | [string])[]
|
|
868
|
+
* edgeTruncate = maximum extra characters left and right of matches
|
|
869
|
+
* noFields = exclude field names in search
|
|
870
|
+
* */
|
|
871
|
+
{
|
|
872
|
+
name: "$term_highlight", /* */
|
|
873
|
+
description: ` :[column_names<string[] | "*">, search_term<string>, opts?<{ returnIndex?: number; edgeTruncate?: number; noFields?: boolean }>] -> get case-insensitive text match highlight`,
|
|
874
|
+
type: "function",
|
|
875
|
+
numArgs: 1,
|
|
876
|
+
singleColArg: true,
|
|
877
|
+
canBeUsedForFilter: true,
|
|
878
|
+
getFields: (args: any[]) => args[0],
|
|
879
|
+
getQuery: ({ allowedFields, args, tableAlias, allColumns }) => {
|
|
880
|
+
|
|
881
|
+
const cols = parseFieldFilter(args[0], false, allowedFields);
|
|
882
|
+
let term = args[1];
|
|
883
|
+
const rawTerm = args[1];
|
|
884
|
+
const { edgeTruncate, noFields = false, returnType, matchCase = false } = args[2] || {};
|
|
885
|
+
if(!isEmpty(args[2])){
|
|
886
|
+
const keys = Object.keys(args[2]);
|
|
887
|
+
const validKeys = ["edgeTruncate", "noFields", "returnType", "matchCase"];
|
|
888
|
+
const bad_keys = keys.filter(k => !validKeys.includes(k));
|
|
889
|
+
if(bad_keys.length) throw "Invalid options provided for $term_highlight. Expecting one of: " + validKeys.join(", ");
|
|
890
|
+
}
|
|
891
|
+
if(!cols.length) throw "Cols are empty/invalid";
|
|
892
|
+
if(typeof term !== "string") throw "Non string term provided: " + term;
|
|
893
|
+
if(edgeTruncate !== undefined && (!Number.isInteger(edgeTruncate) || edgeTruncate < -1)) throw "Invalid edgeTruncate. expecting a positive integer";
|
|
894
|
+
if(typeof noFields !== "boolean") throw "Invalid noFields. expecting boolean";
|
|
895
|
+
const RETURN_TYPES = ["index", "boolean", "object"];
|
|
896
|
+
if(returnType && !RETURN_TYPES.includes(returnType)){
|
|
897
|
+
throw `returnType can only be one of: ${RETURN_TYPES}`
|
|
898
|
+
}
|
|
899
|
+
|
|
900
|
+
const makeTextMatcherArray = (rawText: string, _term: string) => {
|
|
901
|
+
let matchText = rawText, term = _term;
|
|
902
|
+
if(!matchCase) {
|
|
903
|
+
matchText = `LOWER(${rawText})`
|
|
904
|
+
term = `LOWER(${term})`
|
|
905
|
+
}
|
|
906
|
+
let leftStr = `substr(${rawText}, 1, position(${term} IN ${matchText}) - 1 )`,
|
|
907
|
+
rightStr = `substr(${rawText}, position(${term} IN ${matchText}) + length(${term}) )`;
|
|
908
|
+
if(edgeTruncate){
|
|
909
|
+
leftStr = `RIGHT(${leftStr}, ${asValue(edgeTruncate)})`;
|
|
910
|
+
rightStr = `LEFT(${rightStr}, ${asValue(edgeTruncate)})`
|
|
911
|
+
}
|
|
912
|
+
return `
|
|
913
|
+
CASE WHEN position(${term} IN ${matchText}) > 0 AND ${term} <> ''
|
|
914
|
+
THEN array_to_json(ARRAY[
|
|
915
|
+
to_json( ${leftStr}::TEXT ),
|
|
916
|
+
array_to_json(
|
|
917
|
+
ARRAY[substr(${rawText}, position(${term} IN ${matchText}), length(${term}) )::TEXT ]
|
|
918
|
+
),
|
|
919
|
+
to_json(${rightStr}::TEXT )
|
|
920
|
+
])
|
|
921
|
+
ELSE
|
|
922
|
+
array_to_json(ARRAY[(${rawText})::TEXT])
|
|
923
|
+
END
|
|
924
|
+
`;
|
|
925
|
+
}
|
|
926
|
+
|
|
927
|
+
const colRaw = "( " + cols.map(c =>`${noFields? "" : (asValue(c + ": ") + " || ")} COALESCE(${asNameAlias(c, tableAlias)}::TEXT, '')`).join(" || ', ' || ") + " )";
|
|
928
|
+
let col = colRaw;
|
|
929
|
+
term = asValue(term);
|
|
930
|
+
if(!matchCase) {
|
|
931
|
+
col = "LOWER" + col;
|
|
932
|
+
term = `LOWER(${term})`
|
|
933
|
+
}
|
|
934
|
+
|
|
935
|
+
let leftStr = `substr(${colRaw}, 1, position(${term} IN ${col}) - 1 )`,
|
|
936
|
+
rightStr = `substr(${colRaw}, position(${term} IN ${col}) + length(${term}) )`;
|
|
937
|
+
if(edgeTruncate){
|
|
938
|
+
leftStr = `RIGHT(${leftStr}, ${asValue(edgeTruncate)})`;
|
|
939
|
+
rightStr = `LEFT(${rightStr}, ${asValue(edgeTruncate)})`
|
|
940
|
+
}
|
|
941
|
+
|
|
942
|
+
// console.log(col);
|
|
943
|
+
let res = ""
|
|
944
|
+
if(returnType === "index"){
|
|
945
|
+
res = `CASE WHEN position(${term} IN ${col}) > 0 THEN position(${term} IN ${col}) - 1 ELSE -1 END`;
|
|
946
|
+
|
|
947
|
+
// } else if(returnType === "boolean"){
|
|
948
|
+
// res = `CASE WHEN position(${term} IN ${col}) > 0 THEN TRUE ELSE FALSE END`;
|
|
949
|
+
|
|
950
|
+
} else if(returnType === "object" || returnType === "boolean"){
|
|
951
|
+
const hasChars = Boolean(rawTerm && /[a-z]/i.test(rawTerm));
|
|
952
|
+
const validCols = cols.map(c => {
|
|
953
|
+
const colInfo = allColumns.find(ac => ac.name === c);
|
|
954
|
+
return {
|
|
955
|
+
key: c,
|
|
956
|
+
colInfo
|
|
957
|
+
}
|
|
958
|
+
})
|
|
959
|
+
.filter(c => c.colInfo && c.colInfo.udt_name !== "bytea")
|
|
960
|
+
|
|
961
|
+
const _cols = validCols.filter(c =>
|
|
962
|
+
/** Exclude numeric columns when the search tern contains a character */
|
|
963
|
+
!hasChars ||
|
|
964
|
+
postgresToTsType(c.colInfo!.udt_name) !== "number"
|
|
965
|
+
);
|
|
966
|
+
|
|
967
|
+
/** This will break GROUP BY (non-integer constant in GROUP BY) */
|
|
968
|
+
if(!_cols.length){
|
|
969
|
+
if(validCols.length && hasChars) throw `You're searching the impossible: characters in numeric fields. Use this to prevent making such a request in future: /[a-z]/i.test(your_term) `
|
|
970
|
+
return (returnType === "boolean")? "FALSE" : "NULL"
|
|
971
|
+
}
|
|
972
|
+
res = `CASE
|
|
973
|
+
${_cols
|
|
974
|
+
.map(c => {
|
|
975
|
+
const colNameEscaped = asNameAlias(c.key, tableAlias)
|
|
976
|
+
let colSelect = `${colNameEscaped}::TEXT`;
|
|
977
|
+
const isTstamp = c.colInfo?.udt_name.startsWith("timestamp");
|
|
978
|
+
if(isTstamp || c.colInfo?.udt_name === "date"){
|
|
979
|
+
colSelect = `( CASE WHEN ${colNameEscaped} IS NULL THEN ''
|
|
980
|
+
ELSE concat_ws(' ',
|
|
981
|
+
trim(to_char(${colNameEscaped}, 'YYYY-MM-DD HH24:MI:SS')),
|
|
982
|
+
trim(to_char(${colNameEscaped}, 'Day Month')),
|
|
983
|
+
'Q' || trim(to_char(${colNameEscaped}, 'Q')),
|
|
984
|
+
'WK' || trim(to_char(${colNameEscaped}, 'WW'))
|
|
985
|
+
) END)`
|
|
986
|
+
}
|
|
987
|
+
const colTxt = `COALESCE(${colSelect}, '')`; // position(${term} IN ${colTxt}) > 0
|
|
988
|
+
if(returnType === "boolean"){
|
|
989
|
+
return `
|
|
990
|
+
WHEN ${colTxt} ${matchCase? "LIKE" : "ILIKE"} ${asValue('%' + rawTerm + '%')}
|
|
991
|
+
THEN TRUE
|
|
992
|
+
`
|
|
993
|
+
}
|
|
994
|
+
return `
|
|
995
|
+
WHEN ${colTxt} ${matchCase? "LIKE" : "ILIKE"} ${asValue('%' + rawTerm + '%')}
|
|
996
|
+
THEN json_build_object(
|
|
997
|
+
${asValue(c.key)},
|
|
998
|
+
${makeTextMatcherArray(
|
|
999
|
+
colTxt,
|
|
1000
|
+
term
|
|
1001
|
+
)}
|
|
1002
|
+
)::jsonb
|
|
1003
|
+
`
|
|
1004
|
+
}).join(" ")}
|
|
1005
|
+
ELSE ${(returnType === "boolean")? "FALSE" : "NULL"}
|
|
1006
|
+
|
|
1007
|
+
END`;
|
|
1008
|
+
|
|
1009
|
+
// console.log(res)
|
|
1010
|
+
} else {
|
|
1011
|
+
/* If no match or empty search THEN return full row as string within first array element */
|
|
1012
|
+
res = `CASE WHEN position(${term} IN ${col}) > 0 AND ${term} <> '' THEN array_to_json(ARRAY[
|
|
1013
|
+
to_json( ${leftStr}::TEXT ),
|
|
1014
|
+
array_to_json(
|
|
1015
|
+
ARRAY[substr(${colRaw}, position(${term} IN ${col}), length(${term}) )::TEXT ]
|
|
1016
|
+
),
|
|
1017
|
+
to_json(${rightStr}::TEXT )
|
|
1018
|
+
]) ELSE array_to_json(ARRAY[(${colRaw})::TEXT]) END`;
|
|
1019
|
+
|
|
1020
|
+
}
|
|
1021
|
+
|
|
1022
|
+
return res;
|
|
1023
|
+
}
|
|
1024
|
+
},
|
|
1025
|
+
|
|
1026
|
+
/* Aggs */
|
|
1027
|
+
...["max", "min", "count", "avg", "json_agg", "jsonb_agg", "string_agg", "array_agg", "sum"].map(aggName => ({
|
|
1028
|
+
name: "$" + aggName,
|
|
1029
|
+
type: "aggregation",
|
|
1030
|
+
numArgs: 1,
|
|
1031
|
+
singleColArg: true,
|
|
1032
|
+
getFields: (args: any[]) => [args[0]],
|
|
1033
|
+
getQuery: ({ args, tableAlias }) => {
|
|
1034
|
+
let extraArgs = "";
|
|
1035
|
+
if(args.length > 1){
|
|
1036
|
+
extraArgs = pgp.as.format(", $1:csv", args.slice(1))
|
|
1037
|
+
}
|
|
1038
|
+
return aggName + "(" + asNameAlias(args[0], tableAlias) + `${extraArgs})`;
|
|
1039
|
+
}
|
|
1040
|
+
} satisfies FunctionSpec)),
|
|
1041
|
+
|
|
1042
|
+
{
|
|
1043
|
+
name: "$jsonb_build_object",
|
|
1044
|
+
type: "function",
|
|
1045
|
+
numArgs: 22,
|
|
1046
|
+
minCols: 1,
|
|
1047
|
+
singleColArg: false,
|
|
1048
|
+
getFields: args => args,
|
|
1049
|
+
getQuery: ({ args, tableAlias }) => {
|
|
1050
|
+
return `jsonb_build_object(${args.flatMap(arg => [asValue(arg), asNameAlias(arg, tableAlias)]).join(", ")})`;
|
|
1051
|
+
}
|
|
1052
|
+
},
|
|
1053
|
+
|
|
1054
|
+
/* More aggs */
|
|
1055
|
+
{
|
|
1056
|
+
name: "$countAll",
|
|
1057
|
+
type: "aggregation",
|
|
1058
|
+
description: `agg :[] COUNT of all rows `,
|
|
1059
|
+
singleColArg: true,
|
|
1060
|
+
numArgs: 0,
|
|
1061
|
+
getFields: (args: any[]) => [],
|
|
1062
|
+
getQuery: ({ allowedFields, args, tableAlias }) => {
|
|
1063
|
+
return "COUNT(*)";
|
|
1064
|
+
}
|
|
1065
|
+
} as FunctionSpec,
|
|
1066
|
+
{
|
|
1067
|
+
name: "$diff_perc",
|
|
1068
|
+
type: "aggregation",
|
|
1069
|
+
numArgs: 1,
|
|
1070
|
+
singleColArg: true,
|
|
1071
|
+
getFields: (args: any[]) => [args[0]],
|
|
1072
|
+
getQuery: ({ allowedFields, args, tableAlias }) => {
|
|
1073
|
+
const col = asNameAlias(args[0], tableAlias);
|
|
1074
|
+
return `round( ( ( MAX(${col}) - MIN(${col}) )::float/MIN(${col}) ) * 100, 2)`
|
|
1075
|
+
}
|
|
1076
|
+
} as FunctionSpec
|
|
1077
|
+
];
|
|
1078
|
+
|
|
1079
|
+
/* The difference between a function and computed field is that the computed field does not require any arguments */
|
|
1080
|
+
export const COMPUTED_FIELDS: FieldSpec[] = [
|
|
1081
|
+
|
|
1082
|
+
/**
|
|
1083
|
+
* Used instead of row id. Must be used as a last resort. Use all non pseudo or domain data type columns first!
|
|
1084
|
+
*/
|
|
1085
|
+
{
|
|
1086
|
+
name: "$rowhash",
|
|
1087
|
+
type: "computed",
|
|
1088
|
+
// description: ` order hash of row content `,
|
|
1089
|
+
getQuery: ({ allowedFields, tableAlias, ctidField }) => {
|
|
1090
|
+
return "md5(" +
|
|
1091
|
+
allowedFields
|
|
1092
|
+
|
|
1093
|
+
/* CTID not available in AFTER trigger */
|
|
1094
|
+
// .concat(ctidField? [ctidField] : [])
|
|
1095
|
+
.sort()
|
|
1096
|
+
.map(f => asNameAlias(f, tableAlias))
|
|
1097
|
+
.map(f => `md5(coalesce(${f}::text, 'dd'))`)
|
|
1098
|
+
.join(" || ") +
|
|
1099
|
+
`)`;
|
|
1100
|
+
}
|
|
1101
|
+
}
|
|
1102
|
+
// ,{
|
|
1103
|
+
// name: "ctid",
|
|
1104
|
+
// type: "computed",
|
|
1105
|
+
// // description: ` order hash of row content `,
|
|
1106
|
+
// getQuery: ({ allowedFields, tableAlias, ctidField }) => {
|
|
1107
|
+
// return asNameAlias("ctid", tableAlias);
|
|
1108
|
+
// }
|
|
1109
|
+
// }
|
|
1110
|
+
];
|
|
1111
|
+
|
|
1112
|
+
/*
|
|
1113
|
+
|
|
1114
|
+
|
|
1115
|
+
get key val pairs:
|
|
1116
|
+
obj.key.path value
|
|
1117
|
+
|
|
1118
|
+
|
|
1119
|
+
WITH RECURSIVE extract_all AS (
|
|
1120
|
+
select
|
|
1121
|
+
key as path,
|
|
1122
|
+
jsonb_typeof(value) as type,
|
|
1123
|
+
CASE WHEN trim(jsonb_typeof(value)) = 'array' THEN jsonb_typeof(value->0) END as elem_type,
|
|
1124
|
+
value
|
|
1125
|
+
from (SELECT * FROM mytable LIMIT 1) zzzzz
|
|
1126
|
+
cross join lateral jsonb_each(jdata)
|
|
1127
|
+
union all
|
|
1128
|
+
select
|
|
1129
|
+
path || '.' || coalesce(obj_key, (arr_key- 1)::text),
|
|
1130
|
+
jsonb_typeof(coalesce(obj_value, arr_value)) as type,
|
|
1131
|
+
CASE WHEN jsonb_typeof(coalesce(obj_value, arr_value)) = 'array' THEN jsonb_typeof(coalesce(obj_value, arr_value)->0) END as elem_type,
|
|
1132
|
+
coalesce(obj_value, arr_value)
|
|
1133
|
+
from extract_all
|
|
1134
|
+
left join lateral
|
|
1135
|
+
jsonb_each(case jsonb_typeof(value) when 'object' then value end)
|
|
1136
|
+
as o(obj_key, obj_value)
|
|
1137
|
+
on jsonb_typeof(value) = 'object'
|
|
1138
|
+
left join lateral
|
|
1139
|
+
jsonb_array_elements(case jsonb_typeof(value) when 'array' then value end)
|
|
1140
|
+
with ordinality as a(arr_value, arr_key)
|
|
1141
|
+
on jsonb_typeof(value) = 'array'
|
|
1142
|
+
where obj_key is not null or arr_key is not null
|
|
1143
|
+
)
|
|
1144
|
+
SELECT *, array_length(string_to_array(path, '.'), 1) - 1 as depth
|
|
1145
|
+
FROM extract_all t1
|
|
1146
|
+
WHERE NOT EXISTS ( --Keep only leaf values
|
|
1147
|
+
SELECT 1
|
|
1148
|
+
FROM extract_all t2
|
|
1149
|
+
WHERE length(t1.path) < length(t2.path)
|
|
1150
|
+
AND starts_with(t2.path, t1.path)
|
|
1151
|
+
);
|
|
1152
|
+
|
|
1153
|
+
*/
|