actn-db 0.0.2 → 0.0.3
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- checksums.yaml +4 -4
- data/db/lib/_0_actn.js +1 -1
- data/db/lib/_4_builder.coffee +1 -1
- data/db/lib/_4_builder.js +1 -1
- data/lib/actn/db/version.rb +1 -1
- metadata +1 -3
- data/db/lib/_0_actn_f.coffee +0 -381
- data/db/lib/_0_actn_f.js +0 -403
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5ca2965b4c761b78728413b0c7a0fef06bbb1432
+  data.tar.gz: 04739049558b259764222239754d5f35230ae0d1
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 01e20e754b6e96bcd1e796cab2dd167073d735ec8c44b1fe1b802fbd712720cc4a83e3f262a79cb42589d5c5c0d015de6a708a2d2f8d02989358745cb83106a1
+  data.tar.gz: 570c971b36b4337645a7ff0bac4fb074848b1a065341bc4fcbabaeff20353edcc0f962af30c806f73a96273484cfd8b0178a6e147ad6a9ddd32e44b3b7eeb41f

data/db/lib/_0_actn.js
CHANGED
data/db/lib/_4_builder.coffee
CHANGED
data/db/lib/_4_builder.js
CHANGED
data/lib/actn/db/version.rb
CHANGED
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: actn-db
 version: !ruby/object:Gem::Version
-  version: 0.0.2
+  version: 0.0.3
 platform: ruby
 authors:
 - Onur Uyar
@@ -154,8 +154,6 @@ files:
 - db/__functions.sql
 - db/__setup.sql
 - db/lib/_0_actn.js
-- db/lib/_0_actn_f.coffee
-- db/lib/_0_actn_f.js
 - db/lib/_1_underscore.js
 - db/lib/_2_jjv.js
 - db/lib/_3_inflections.js

data/db/lib/_0_actn_f.coffee
DELETED
@@ -1,381 +0,0 @@
class Funcs

  __json: (_data, _key) ->
    ret = actn.valueAt(_data,_key)
    return null unless ret?
    return JSON.stringify(ret)

  __string: (_data, _key) ->
    ret = actn.valueAt(_data,_key)
    return null unless ret?
    return ret.toString()

  __integer: (_data, _key) ->
    ret = actn.valueAt(_data,_key)
    return null unless ret?
    return parseInt(ret)

  __integer_array: (_data, _key) ->
    ret = actn.valueAt(_data,_key)
    return null unless ret?
    return (if ret instanceof Array then ret else [ret])

  __float: (_data, _key) ->
    ret = actn.valueAt(_data,_key)
    return null unless ret?
    return parseFloat(ret)

  __bool: (_data, _key) ->
    ret = actn.valueAt(_data,_key)
    return null unless ret?
    return !!ret

  __timestamp: (_data, _key) ->
    ret = actn.valueAt(_data,_key)
    return null unless ret?
    return new Date(ret)

  __patch: (_data, _value, _sync) ->

    data = _data
    changes = _value
    isObject = false

    sync = if _sync? then _sync else true

    defaults = _.pick( data, _.keys( JSON.parse( plv8.find_function('__defaults')() ) ) )

    for k of changes
      if data.hasOwnProperty(k)
        isObject = typeof (data[k]) is "object" and typeof (changes[k]) is "object"
        data[k] = if isObject and sync then _.extend(data[k], changes[k]) else changes[k]
      else
        data[k] = changes[k]

    unless sync
      for k of data
        delete data[k] unless changes[k]?

    _.extend(data, defaults)

    return JSON.stringify(data)

  __select: (_data, _fields) ->
    data = _data
    fields = _fields
    ret = _.pick(data,fields.split(","))

    return JSON.stringify(ret)

  __push: (_data, _key, _value) ->
    data = _data
    value = _value
    keys = _key.split(".")
    len = keys.length
    last_field = data
    field = data
    i = 0

    while i < len
      last_field = field
      field = field[keys[i]] if field
      ++i
    if field
      field.push value
    else
      value = [value] unless value instanceof Array
      last_field[keys.pop()] = value

    return JSON.stringify(data)

  __uuid: () ->
    ary = plv8.execute 'SELECT uuid_generate_v4() as uuid;'
    return JSON.stringify(ary[0])

  __defaults: () ->
    uuid = JSON.parse(plv8.find_function('__uuid')())
    timestamp = new Date()
    return JSON.stringify({uuid: uuid.uuid, created_at: timestamp, updated_at: timestamp})

  __create_table: (schema_name, table_name) ->
    plv8.execute """
      CREATE TABLE #{schema_name}.#{table_name} (
      id serial NOT NULL,
      data json DEFAULT __uuid() NOT NULL,
      CONSTRAINT #{schema_name}_#{table_name}_pkey PRIMARY KEY (id));

      CREATE UNIQUE INDEX indx_#{schema_name}_#{table_name}_unique_uuid ON #{schema_name}.#{table_name} (__string(data,'uuid'));
    """
    return JSON.stringify(table_name)

  __drop_table: (schema_name, table_name) ->
    plv8.execute "DROP TABLE IF EXISTS #{schema_name}.#{table_name} CASCADE;"
    return JSON.stringify(table_name)

  __create_index: (schema_name, table_name, optns) ->
    index_name = "indx_#{schema_name}_#{table_name}"
    for name, type of optns.cols
      index_name += "_#{name}"

    sql = ["CREATE"]
    sql.push "UNIQUE" if optns.unique
    sql.push "INDEX"
    sql.push "CONCURRENTLY" if optns.concurrently
    sql.push "#{index_name} on #{schema_name}.#{table_name}"
    sql.push "("
    cols = []
    for name, type of optns.cols
      meth = "__#{if type is 'text' then 'string' else type}"
      cols.push "#{meth}(data,'#{name}'::#{type})"
    sql.push cols.join(",")
    sql.push ")"

    sql = sql.join(" ")

    plv8.execute(sql)

    return JSON.stringify(index_name)

  __drop_index: (schema_name, table_name, optns) ->
    index_name = "indx_#{schema_name}_#{table_name}"
    for name, type of optns.cols
      index_name += "_#{name}"

    plv8.execute("DROP INDEX IF EXISTS #{index_name}")

    return JSON.stringify(index_name)

  __query: (_schema_name, _table_name, _query) ->
    search_path = if _schema_name is "public" then _schema_name else "#{_schema_name}, public"

    builder = new actn.Builder(_schema_name, _table_name, search_path, _query)

    [sql,params] = builder.build_select()

    rows = plv8.execute(sql,params)

    builder = null

    if _query?.select?.indexOf('COUNT') > -1
      result = rows
    else
      result = _.pluck(rows,'data')

    return JSON.stringify(result)

  __upsert: (_schema_name, _table_name, _data) ->
    # plv8.elog(NOTICE,"UPSERT",JSON.stringify(_data))

    return JSON.stringify(_data) if _data.errors?

    data = _data

    search_path = if _schema_name is "public" then _schema_name else "#{_schema_name},public"

    if data.uuid?

      query = { where: { uuid: data.uuid } }

      builder = new actn.Builder(_schema_name, _table_name, search_path, query )

      [sql,params] = builder.build_update(data)

    else

      builder = new actn.Builder(_schema_name, _table_name, search_path, {})

      [sql,params] = builder.build_insert(data)

    # plan = plv8.prepare(sql, ['json','bool','text'])

    # plv8.elog(NOTICE,sql,JSON.stringify(params))

    rows = plv8.execute(sql, params)

    result = _.pluck(rows,'data')

    result = result[0] if result.length is 1

    builder = null

    return JSON.stringify(result)

  __update: (_schema_name, _table_name, _data, _cond) ->
    return JSON.stringify(_data) if _data.errors?

    search_path = if _schema_name is "public" then _schema_name else "#{_schema_name},public"

    builder = new actn.Builder(_schema_name, _table_name, search_path, {where: _cond})

    [sql,params] = builder.build_update(_data)

    rows = plv8.execute(sql,params)
    result = _.pluck(rows,'data')
    result = result[0] if result.length is 1

    builder = null

    return JSON.stringify(result)

  __delete: (_schema_name, _table_name, _cond) ->
    search_path = if _schema_name is "public" then _schema_name else "#{_schema_name},public"

    builder = new actn.Builder(_schema_name, _table_name, search_path, {where: _cond})

    [sql,params] = builder.build_delete()

    # plv8.elog(NOTICE,"DELETE",sql,params)

    rows = plv8.execute(sql,params)
    result = _.pluck(rows,'data')
    result = result[0] if result.length is 1

    builder = null

    return JSON.stringify(result)

  __validate: (_name, _data) ->
    data = _data

    # plv8.elog(NOTICE,"__VALIDATE",_name,JSON.stringify(_data))

    return data unless model = plv8.find_function('__find_model')(_name)

    model = JSON.parse(model)

    # plv8.elog(NOTICE,"__VALIDATE MODEL",_name,JSON.stringify(model))

    if model?.schema?

      errors = actn.jjv.validate(model.schema,data)

      plv8.elog(NOTICE,"VALVAL",JSON.stringify(model.schema))

      if data.uuid? and model.schema.readonly_attributes?

        data = _.omit(data,model.schema.readonly_attributes)

        # plv8.elog(NOTICE,"VALIDATE READONLY",JSON.stringify(data),JSON.stringify(model.schema.readonly_attributes))

      else if model.schema.unique_attributes?

        _schema = if _name is "Model" then "core" else "public"
        _table = model.name.tableize()
        __query = plv8.find_function("__query")

        for uniq_attr in model.schema.unique_attributes or []
          if data[uniq_attr]?
            where = {}
            where[uniq_attr] = data[uniq_attr]
            # plv8.elog(NOTICE,"VALIDATE WHERE",JSON.stringify({where: where}))
            found = JSON.parse(__query(_schema,_table,{where: where}))
            # plv8.elog(NOTICE,"VALIDATE FOUND",JSON.stringify(found))
            unless _.isEmpty(found)
              errors ?= {validation: {}}
              errors['validation'][uniq_attr] ?= {}
              errors['validation'][uniq_attr]["has already been taken"] = true

      data = {errors: errors} if errors?

    # plv8.elog(NOTICE,"__VALIDATE DATA",_name,JSON.stringify(data))

    return data

  __find_model: (_name) ->
    rows = plv8.execute("""SET search_path TO core,public;
                           SELECT data FROM core.models
                           WHERE __string(data,'name'::text) = $1::text""", [_name])

    return unless rows?

    result = _.pluck(rows,'data')[0]

    return JSON.stringify(result)

  model_callbacks: (TG_OP, NEW, OLD) ->
    table_name = (NEW?.data?.name or OLD?.data?.name)?.tableize()
    table_schema = (NEW?.data?.table_schema or OLD?.data?.table_schema) or "public"

    return if table_schema is "core"

    # plv8.elog(NOTICE,"MODEL CALLBACKS",table_schema,JSON.stringify(NEW?.data or OLD?.data))

    mapper = (ind) -> _.keys(ind.cols)
    differ = (_ind) ->
      (ind) ->
        _.isEmpty( _.difference( _.keys(ind.cols), _.flatten( _.map( _ind.data?.indexes, mapper ) ) ) )

    switch TG_OP
      when "INSERT"
        plv8.execute "SELECT __create_table($1,$2)",[table_schema , table_name]

        plv8.execute "SELECT __create_index($1,$2,$3)", [table_schema, table_name, {cols: {path: "text" }}]

        for indopts in NEW?.data?.indexes or []
          plv8.execute "SELECT __create_index($1,$2,$3)", [table_schema, table_name, indopts]

      when "UPDATE"

        diff = _.reject( OLD?.data?.indexes, differ(NEW) )

        for indopts in diff
          plv8.execute "SELECT __drop_index($1,$2,$3)", [table_schema, table_name, indopts]

        diff = _.reject( NEW?.data?.indexes, differ(OLD) )

        for indopts in diff
          plv8.execute "SELECT __create_index($1,$2,$3)", [table_schema, table_name, indopts]

      when "DELETE"
        for indopts in Old?.data?.indexes or []
          plv8.execute "SELECT __drop_index($1,$2,$3)", [table_schema, table_name, indopts]
        plv8.execute "SELECT __drop_table($1,$2)",[table_schema , table_name]

  hook_trigger: (TG_TABLE_NAME, TG_OP, NEW, OLD) ->
    upsert_func = plv8.find_function("__upsert")
    model = JSON.parse(plv8.find_function("__find_model")(TG_TABLE_NAME.classify()))

    callback = {
      INSERT: "after_create"
      UPDATE: "after_update"
      DELETE: "after_destroy"
    }[TG_OP]

    # plv8.elog(NOTICE,"HOOK TRIGGER",JSON.stringify(model))

    for hook in model?.hooks?[callback] or []
      hook.run_at ?= new Date()
      hook.callback = callback

      job =
        hook: hook
        table_name: TG_TABLE_NAME
        record_uuid: NEW?.data?.uuid or OLD?.data?.uuid
        record: OLD.data if TG_OP is "DELETE"

      res = upsert_func "core", "jobs", JSON.stringify(job)
      plv8.execute "SELECT pg_notify('jobs', $1);", [res]

  jobs_model_callbacks: (TG_TABLE_NAME, TG_OP, NEW, OLD) ->
    table_name = (NEW?.data?.name or OLD?.data?.name).tableize()

    table_schema = (NEW?.data?.table_schema or OLD?.data?.table_schema) or "public"

    return if table_schema is "core"

    if TG_OP is "DELETE"
      plv8.execute "DELETE FROM core.jobs WHERE __string(data, 'table_name'::text) = $1;", [table_name]

    # if TG_OP is "DELETE" and OLD.data.hooks? or TG_OP is "UPDATE" and NEW.data.hooks?
    #   plv8.execute "DROP TRIGGER IF EXISTS #{table_schema}_#{table_name}_hook_trigger ON #{table_schema}.#{table_name}"

    if TG_OP is "INSERT" or TG_OP is "UPDATE" and NEW.data.hooks? and not OLD.data.hooks?
      plv8.execute """CREATE TRIGGER #{table_schema}_#{table_name}_hook_trigger
                      AFTER INSERT OR UPDATE OR DELETE ON #{table_schema}.#{table_name}
                      FOR EACH ROW EXECUTE PROCEDURE hook_trigger();"""

actn.funcs = new Funcs

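For reference, a small worked example of what the removed __create_index helper assembles. The inputs below are hypothetical (a unique index on an assumed "email" text key inside the json data column of public.users, so optns.unique adds the UNIQUE keyword); the snippet mirrors the deleted CoffeeScript's string-building steps without the plv8 call, and none of these names come from the gem itself.

    # Hypothetical inputs; mirrors the deleted __create_index logic, minus plv8.execute.
    optns       = { unique: true, cols: { email: "text" } }
    schema_name = "public"
    table_name  = "users"

    index_name = "indx_#{schema_name}_#{table_name}"
    index_name += "_#{name}" for name of optns.cols

    cols = ("__#{if type is 'text' then 'string' else type}(data,'#{name}'::#{type})" for name, type of optns.cols)

    sql = ["CREATE", "UNIQUE", "INDEX",
           "#{index_name} on #{schema_name}.#{table_name}",
           "(", cols.join(","), ")"].join(" ")
    # sql is now:
    # "CREATE UNIQUE INDEX indx_public_users_email on public.users ( __string(data,'email'::text) )"
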
data/db/lib/_0_actn_f.js
DELETED
@@ -1,403 +0,0 @@
// Generated by CoffeeScript 1.6.3
(function() {
  var Funcs;

  Funcs = (function() {
    function Funcs() {}

    Funcs.prototype.__json = function(_data, _key) {
      var ret;
      ret = actn.valueAt(_data, _key);
      if (ret == null) {
        return null;
      }
      return JSON.stringify(ret);
    };

    Funcs.prototype.__string = function(_data, _key) {
      var ret;
      ret = actn.valueAt(_data, _key);
      if (ret == null) {
        return null;
      }
      return ret.toString();
    };

    Funcs.prototype.__integer = function(_data, _key) {
      var ret;
      ret = actn.valueAt(_data, _key);
      if (ret == null) {
        return null;
      }
      return parseInt(ret);
    };

    Funcs.prototype.__integer_array = function(_data, _key) {
      var ret;
      ret = actn.valueAt(_data, _key);
      if (ret == null) {
        return null;
      }
      return (ret instanceof Array ? ret : [ret]);
    };

    Funcs.prototype.__float = function(_data, _key) {
      var ret;
      ret = actn.valueAt(_data, _key);
      if (ret == null) {
        return null;
      }
      return parseFloat(ret);
    };

    Funcs.prototype.__bool = function(_data, _key) {
      var ret;
      ret = actn.valueAt(_data, _key);
      if (ret == null) {
        return null;
      }
      return !!ret;
    };

    Funcs.prototype.__timestamp = function(_data, _key) {
      var ret;
      ret = actn.valueAt(_data, _key);
      if (ret == null) {
        return null;
      }
      return new Date(ret);
    };

    Funcs.prototype.__patch = function(_data, _value, _sync) {
      var changes, data, defaults, isObject, k, sync;
      data = _data;
      changes = _value;
      isObject = false;
      sync = _sync != null ? _sync : true;
      defaults = _.pick(data, _.keys(JSON.parse(plv8.find_function('__defaults')())));
      for (k in changes) {
        if (data.hasOwnProperty(k)) {
          isObject = typeof data[k] === "object" && typeof changes[k] === "object";
          data[k] = isObject && sync ? _.extend(data[k], changes[k]) : changes[k];
        } else {
          data[k] = changes[k];
        }
      }
      if (!sync) {
        for (k in data) {
          if (changes[k] == null) {
            delete data[k];
          }
        }
      }
      _.extend(data, defaults);
      return JSON.stringify(data);
    };

    Funcs.prototype.__select = function(_data, _fields) {
      var data, fields, ret;
      data = _data;
      fields = _fields;
      ret = _.pick(data, fields.split(","));
      return JSON.stringify(ret);
    };

    Funcs.prototype.__push = function(_data, _key, _value) {
      var data, field, i, keys, last_field, len, value;
      data = _data;
      value = _value;
      keys = _key.split(".");
      len = keys.length;
      last_field = data;
      field = data;
      i = 0;
      while (i < len) {
        last_field = field;
        if (field) {
          field = field[keys[i]];
        }
        ++i;
      }
      if (field) {
        field.push(value);
      } else {
        if (!(value instanceof Array)) {
          value = [value];
        }
        last_field[keys.pop()] = value;
      }
      return JSON.stringify(data);
    };

    Funcs.prototype.__uuid = function() {
      var ary;
      ary = plv8.execute('SELECT uuid_generate_v4() as uuid;');
      return JSON.stringify(ary[0]);
    };

    Funcs.prototype.__defaults = function() {
      var timestamp, uuid;
      uuid = JSON.parse(plv8.find_function('__uuid')());
      timestamp = new Date();
      return JSON.stringify({
        uuid: uuid.uuid,
        created_at: timestamp,
        updated_at: timestamp
      });
    };

    Funcs.prototype.__create_table = function(schema_name, table_name) {
      plv8.execute(" CREATE TABLE " + schema_name + "." + table_name + " (\n id serial NOT NULL,\n data json DEFAULT __uuid() NOT NULL,\n CONSTRAINT " + schema_name + "_" + table_name + "_pkey PRIMARY KEY (id));\n\n CREATE UNIQUE INDEX indx_" + schema_name + "_" + table_name + "_unique_uuid ON " + schema_name + "." + table_name + " (__string(data,'uuid'));");
      return JSON.stringify(table_name);
    };

    Funcs.prototype.__drop_table = function(schema_name, table_name) {
      plv8.execute("DROP TABLE IF EXISTS " + schema_name + "." + table_name + " CASCADE;");
      return JSON.stringify(table_name);
    };

    Funcs.prototype.__create_index = function(schema_name, table_name, optns) {
      var cols, index_name, meth, name, sql, type, _ref, _ref1;
      index_name = "indx_" + schema_name + "_" + table_name;
      _ref = optns.cols;
      for (name in _ref) {
        type = _ref[name];
        index_name += "_" + name;
      }
      sql = ["CREATE"];
      if (optns.unique) {
        sql.push("UNIQUE");
      }
      sql.push("INDEX");
      if (optns.concurrently) {
        sql.push("CONCURRENTLY");
      }
      sql.push("" + index_name + " on " + schema_name + "." + table_name);
      sql.push("(");
      cols = [];
      _ref1 = optns.cols;
      for (name in _ref1) {
        type = _ref1[name];
        meth = "__" + (type === 'text' ? 'string' : type);
        cols.push("" + meth + "(data,'" + name + "'::" + type + ")");
      }
      sql.push(cols.join(","));
      sql.push(")");
      sql = sql.join(" ");
      plv8.execute(sql);
      return JSON.stringify(index_name);
    };

    Funcs.prototype.__drop_index = function(schema_name, table_name, optns) {
      var index_name, name, type, _ref;
      index_name = "indx_" + schema_name + "_" + table_name;
      _ref = optns.cols;
      for (name in _ref) {
        type = _ref[name];
        index_name += "_" + name;
      }
      plv8.execute("DROP INDEX IF EXISTS " + index_name);
      return JSON.stringify(index_name);
    };

    Funcs.prototype.__query = function(_schema_name, _table_name, _query) {
      var builder, params, result, rows, search_path, sql, _ref, _ref1;
      search_path = _schema_name === "public" ? _schema_name : "" + _schema_name + ", public";
      builder = new actn.Builder(_schema_name, _table_name, search_path, _query);
      _ref = builder.build_select(), sql = _ref[0], params = _ref[1];
      rows = plv8.execute(sql, params);
      builder = null;
      if ((_query != null ? (_ref1 = _query.select) != null ? _ref1.indexOf('COUNT') : void 0 : void 0) > -1) {
        result = rows;
      } else {
        result = _.pluck(rows, 'data');
      }
      return JSON.stringify(result);
    };

    Funcs.prototype.__upsert = function(_schema_name, _table_name, _data) {
      var builder, data, params, query, result, rows, search_path, sql, _ref, _ref1;
      if (_data.errors != null) {
        return JSON.stringify(_data);
      }
      data = _data;
      search_path = _schema_name === "public" ? _schema_name : "" + _schema_name + ",public";
      if (data.uuid != null) {
        query = {
          where: {
            uuid: data.uuid
          }
        };
        builder = new actn.Builder(_schema_name, _table_name, search_path, query);
        _ref = builder.build_update(data), sql = _ref[0], params = _ref[1];
      } else {
        builder = new actn.Builder(_schema_name, _table_name, search_path, {});
        _ref1 = builder.build_insert(data), sql = _ref1[0], params = _ref1[1];
      }
      rows = plv8.execute(sql, params);
      result = _.pluck(rows, 'data');
      if (result.length === 1) {
        result = result[0];
      }
      builder = null;
      return JSON.stringify(result);
    };

    Funcs.prototype.__update = function(_schema_name, _table_name, _data, _cond) {
      var builder, params, result, rows, search_path, sql, _ref;
      if (_data.errors != null) {
        return JSON.stringify(_data);
      }
      search_path = _schema_name === "public" ? _schema_name : "" + _schema_name + ",public";
      builder = new actn.Builder(_schema_name, _table_name, search_path, {
        where: _cond
      });
      _ref = builder.build_update(_data), sql = _ref[0], params = _ref[1];
      rows = plv8.execute(sql, params);
      result = _.pluck(rows, 'data');
      if (result.length === 1) {
        result = result[0];
      }
      builder = null;
      return JSON.stringify(result);
    };

    Funcs.prototype.__delete = function(_schema_name, _table_name, _cond) {
      var builder, params, result, rows, search_path, sql, _ref;
      search_path = _schema_name === "public" ? _schema_name : "" + _schema_name + ",public";
      builder = new actn.Builder(_schema_name, _table_name, search_path, {
        where: _cond
      });
      _ref = builder.build_delete(), sql = _ref[0], params = _ref[1];
      rows = plv8.execute(sql, params);
      result = _.pluck(rows, 'data');
      if (result.length === 1) {
        result = result[0];
      }
      builder = null;
      return JSON.stringify(result);
    };

    Funcs.prototype.__validate = function(_name, _data) {
      var data, errors, found, model, uniq_attr, where, __query, _base, _i, _len, _ref, _schema, _table;
      data = _data;
      if (!(model = plv8.find_function('__find_model')(_name))) {
        return data;
      }
      model = JSON.parse(model);
      if ((model != null ? model.schema : void 0) != null) {
        errors = actn.jjv.validate(model.schema, data);
        plv8.elog(NOTICE, "VALVAL", JSON.stringify(model.schema));
        if ((data.uuid != null) && (model.schema.readonly_attributes != null)) {
          data = _.omit(data, model.schema.readonly_attributes);
        } else if (model.schema.unique_attributes != null) {
          _schema = _name === "Model" ? "core" : "public";
          _table = model.name.tableize();
          __query = plv8.find_function("__query");
          _ref = model.schema.unique_attributes || [];
          for (_i = 0, _len = _ref.length; _i < _len; _i++) {
            uniq_attr = _ref[_i];
            if (data[uniq_attr] != null) {
              where = {};
              where[uniq_attr] = data[uniq_attr];
              found = JSON.parse(__query(_schema, _table, {
                where: where
              }));
              if (!_.isEmpty(found)) {
                if (errors == null) {
                  errors = {
                    validation: {}
                  };
                }
                if ((_base = errors['validation'])[uniq_attr] == null) {
                  _base[uniq_attr] = {};
                }
                errors['validation'][uniq_attr]["has already been taken"] = true;
              }
            }
          }
        }
        if (errors != null) {
          data = {
            errors: errors
          };
        }
      }
      return data;
    };

    Funcs.prototype.__find_model = function(_name) {
      var result, rows;
      rows = plv8.execute("SET search_path TO core,public; \nSELECT data FROM core.models \nWHERE __string(data,'name'::text) = $1::text", [_name]);
      if (rows == null) {
        return;
      }
      result = _.pluck(rows, 'data')[0];
      return JSON.stringify(result);
    };

    Funcs.prototype.model_callbacks = function(TG_OP, NEW, OLD) {
      var diff, differ, indopts, mapper, table_name, table_schema, _i, _j, _k, _l, _len, _len1, _len2, _len3, _ref, _ref1, _ref10, _ref2, _ref3, _ref4, _ref5, _ref6, _ref7, _ref8, _ref9, _results, _results1;
      table_name = (_ref = (NEW != null ? (_ref1 = NEW.data) != null ? _ref1.name : void 0 : void 0) || (OLD != null ? (_ref2 = OLD.data) != null ? _ref2.name : void 0 : void 0)) != null ? _ref.tableize() : void 0;
      table_schema = ((NEW != null ? (_ref3 = NEW.data) != null ? _ref3.table_schema : void 0 : void 0) || (OLD != null ? (_ref4 = OLD.data) != null ? _ref4.table_schema : void 0 : void 0)) || "public";
      if (table_schema === "core") {
        return;
      }
      mapper = function(ind) {
        return _.keys(ind.cols);
      };
      differ = function(_ind) {
        return function(ind) {
          var _ref5;
          return _.isEmpty(_.difference(_.keys(ind.cols), _.flatten(_.map((_ref5 = _ind.data) != null ? _ref5.indexes : void 0, mapper))));
        };
      };
      switch (TG_OP) {
        case "INSERT":
          plv8.execute("SELECT __create_table($1,$2)", [table_schema, table_name]);
          plv8.execute("SELECT __create_index($1,$2,$3)", [
            table_schema, table_name, {
              cols: {
                path: "text"
              }
            }
          ]);
          _ref6 = (NEW != null ? (_ref5 = NEW.data) != null ? _ref5.indexes : void 0 : void 0) || [];
          _results = [];
          for (_i = 0, _len = _ref6.length; _i < _len; _i++) {
            indopts = _ref6[_i];
            _results.push(plv8.execute("SELECT __create_index($1,$2,$3)", [table_schema, table_name, indopts]));
          }
          return _results;
          break;
        case "UPDATE":
          diff = _.reject(OLD != null ? (_ref7 = OLD.data) != null ? _ref7.indexes : void 0 : void 0, differ(NEW));
          for (_j = 0, _len1 = diff.length; _j < _len1; _j++) {
            indopts = diff[_j];
            plv8.execute("SELECT __drop_index($1,$2,$3)", [table_schema, table_name, indopts]);
          }
          diff = _.reject(NEW != null ? (_ref8 = NEW.data) != null ? _ref8.indexes : void 0 : void 0, differ(OLD));
          _results1 = [];
          for (_k = 0, _len2 = diff.length; _k < _len2; _k++) {
            indopts = diff[_k];
            _results1.push(plv8.execute("SELECT __create_index($1,$2,$3)", [table_schema, table_name, indopts]));
          }
          return _results1;
          break;
        case "DELETE":
          _ref10 = (typeof Old !== "undefined" && Old !== null ? (_ref9 = Old.data) != null ? _ref9.indexes : void 0 : void 0) || [];
          for (_l = 0, _len3 = _ref10.length; _l < _len3; _l++) {
            indopts = _ref10[_l];
            plv8.execute("SELECT __drop_index($1,$2,$3)", [table_schema, table_name, indopts]);
          }
          return plv8.execute("SELECT __drop_table($1,$2)", [table_schema, table_name]);
      }
    };

    return Funcs;

  })();

  actn.funcs = new Funcs;

}).call(this);
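To put the removals in context: inside PL/v8 these helpers call one another through plv8.find_function, as the deleted __validate does with __query. A minimal, hypothetical usage sketch in the same style follows; the "users" table, "email" key, and sample value are illustrative and not taken from the gem.

    # Hypothetical sketch: look up the removed __query helper and run a filtered
    # select, the same way the deleted __validate method does for uniqueness checks.
    __query = plv8.find_function("__query")
    found   = JSON.parse(__query("public", "users", { where: { email: "someone@example.com" } }))
    # found is an array of the matching rows' json "data" documents, or the raw row set
    # when the query object's select contains COUNT.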