logidze 0.12.0 → 1.0.0.rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +29 -5
- data/LICENSE.txt +1 -1
- data/README.md +263 -103
- data/lib/generators/logidze/fx_helper.rb +17 -0
- data/lib/generators/logidze/inject_sql.rb +18 -0
- data/lib/generators/logidze/install/USAGE +6 -1
- data/lib/generators/logidze/install/functions/logidze_compact_history.sql +38 -0
- data/lib/generators/logidze/install/functions/logidze_filter_keys.sql +27 -0
- data/lib/generators/logidze/install/functions/logidze_logger.sql +150 -0
- data/lib/generators/logidze/install/functions/logidze_snapshot.sql +24 -0
- data/lib/generators/logidze/install/functions/logidze_version.sql +20 -0
- data/lib/generators/logidze/install/install_generator.rb +58 -1
- data/lib/generators/logidze/install/templates/hstore.rb.erb +1 -1
- data/lib/generators/logidze/install/templates/migration.rb.erb +19 -232
- data/lib/generators/logidze/install/templates/migration_fx.rb.erb +41 -0
- data/lib/generators/logidze/model/model_generator.rb +49 -13
- data/lib/generators/logidze/model/templates/migration.rb.erb +57 -36
- data/lib/generators/logidze/model/triggers/logidze.sql +6 -0
- data/lib/logidze.rb +27 -14
- data/lib/logidze/history.rb +1 -10
- data/lib/logidze/ignore_log_data.rb +1 -4
- data/lib/logidze/model.rb +48 -35
- data/lib/logidze/version.rb +1 -1
- metadata +48 -73
- data/.gitattributes +0 -3
- data/.github/ISSUE_TEMPLATE.md +0 -20
- data/.github/PULL_REQUEST_TEMPLATE.md +0 -29
- data/.gitignore +0 -40
- data/.rubocop.yml +0 -55
- data/.travis.yml +0 -46
- data/Gemfile +0 -15
- data/Rakefile +0 -28
- data/assets/pg_log_data_chart.png +0 -0
- data/bench/performance/README.md +0 -109
- data/bench/performance/diff_bench.rb +0 -38
- data/bench/performance/insert_bench.rb +0 -22
- data/bench/performance/memory_profile.rb +0 -56
- data/bench/performance/setup.rb +0 -315
- data/bench/performance/update_bench.rb +0 -38
- data/bench/triggers/Makefile +0 -56
- data/bench/triggers/Readme.md +0 -58
- data/bench/triggers/bench.sql +0 -6
- data/bench/triggers/hstore_trigger_setup.sql +0 -38
- data/bench/triggers/jsonb_minus_2_setup.sql +0 -47
- data/bench/triggers/jsonb_minus_setup.sql +0 -49
- data/bench/triggers/keys2_trigger_setup.sql +0 -44
- data/bench/triggers/keys_trigger_setup.sql +0 -50
- data/bin/console +0 -8
- data/bin/setup +0 -9
- data/gemfiles/rails42.gemfile +0 -6
- data/gemfiles/rails5.gemfile +0 -6
- data/gemfiles/rails52.gemfile +0 -6
- data/gemfiles/rails6.gemfile +0 -6
- data/gemfiles/railsmaster.gemfile +0 -7
- data/lib/logidze/ignore_log_data/ignored_columns.rb +0 -46
- data/lib/logidze/migration.rb +0 -20
- data/logidze.gemspec +0 -41
@@ -0,0 +1,17 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module Logidze
|
4
|
+
module Generators
|
5
|
+
# Adds --fx option and provide #fx? method
|
6
|
+
module FxHelper
|
7
|
+
def self.included(base)
|
8
|
+
base.class_option :fx, type: :boolean, optional: true,
|
9
|
+
desc: "Define whether to use fx gem functionality"
|
10
|
+
end
|
11
|
+
|
12
|
+
def fx?
|
13
|
+
options[:fx] || (options[:fx] != false && defined?(::Fx::SchemaDumper))
|
14
|
+
end
|
15
|
+
end
|
16
|
+
end
|
17
|
+
end
|
@@ -0,0 +1,18 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module Logidze
|
4
|
+
module Generators
|
5
|
+
module InjectSql
|
6
|
+
def inject_sql(source, indent: 4)
|
7
|
+
source = ::File.expand_path(find_in_source_paths(source.to_s))
|
8
|
+
|
9
|
+
indent(
|
10
|
+
ERB.new(::File.binread(source)).tap do |erb|
|
11
|
+
erb.filename = source
|
12
|
+
end.result(instance_eval("binding")), # rubocop:disable Style/EvalWithLocation
|
13
|
+
indent
|
14
|
+
)
|
15
|
+
end
|
16
|
+
end
|
17
|
+
end
|
18
|
+
end
|
@@ -1,7 +1,12 @@
|
|
1
1
|
Description:
|
2
2
|
Generates the necessary files to get you up and running with Logidze gem
|
3
|
-
|
3
|
+
|
4
4
|
Examples:
|
5
5
|
rails generate logidze:install
|
6
6
|
|
7
7
|
This will generate the core migration file with trigger function defined.
|
8
|
+
|
9
|
+
rails generate logidze:install --fx
|
10
|
+
|
11
|
+
This will generate schema.rb compatible migration with `create_function` definitions and separate SQL files.
|
12
|
+
The fx gem must be installed.
|
@@ -0,0 +1,38 @@
|
|
1
|
+
-- version: 1
|
2
|
+
CREATE OR REPLACE FUNCTION logidze_compact_history(log_data jsonb, cutoff integer DEFAULT 1) RETURNS jsonb AS $body$
|
3
|
+
DECLARE
|
4
|
+
merged jsonb;
|
5
|
+
BEGIN
|
6
|
+
LOOP
|
7
|
+
merged := jsonb_build_object(
|
8
|
+
'ts',
|
9
|
+
log_data#>'{h,1,ts}',
|
10
|
+
'v',
|
11
|
+
log_data#>'{h,1,v}',
|
12
|
+
'c',
|
13
|
+
(log_data#>'{h,0,c}') || (log_data#>'{h,1,c}')
|
14
|
+
);
|
15
|
+
|
16
|
+
IF (log_data#>'{h,1}' ? 'm') THEN
|
17
|
+
merged := jsonb_set(merged, ARRAY['m'], log_data#>'{h,1,m}');
|
18
|
+
END IF;
|
19
|
+
|
20
|
+
log_data := jsonb_set(
|
21
|
+
log_data,
|
22
|
+
'{h}',
|
23
|
+
jsonb_set(
|
24
|
+
log_data->'h',
|
25
|
+
'{1}',
|
26
|
+
merged
|
27
|
+
) - 0
|
28
|
+
);
|
29
|
+
|
30
|
+
cutoff := cutoff - 1;
|
31
|
+
|
32
|
+
EXIT WHEN cutoff <= 0;
|
33
|
+
END LOOP;
|
34
|
+
|
35
|
+
return log_data;
|
36
|
+
END;
|
37
|
+
$body$
|
38
|
+
LANGUAGE plpgsql;
|
@@ -0,0 +1,27 @@
|
|
1
|
+
-- version: 1
|
2
|
+
CREATE OR REPLACE FUNCTION logidze_filter_keys(obj jsonb, keys text[], include_columns boolean DEFAULT false) RETURNS jsonb AS $body$
|
3
|
+
DECLARE
|
4
|
+
res jsonb;
|
5
|
+
key text;
|
6
|
+
BEGIN
|
7
|
+
res := '{}';
|
8
|
+
|
9
|
+
IF include_columns THEN
|
10
|
+
FOREACH key IN ARRAY keys
|
11
|
+
LOOP
|
12
|
+
IF obj ? key THEN
|
13
|
+
res = jsonb_insert(res, ARRAY[key], obj->key);
|
14
|
+
END IF;
|
15
|
+
END LOOP;
|
16
|
+
ELSE
|
17
|
+
res = obj;
|
18
|
+
FOREACH key IN ARRAY keys
|
19
|
+
LOOP
|
20
|
+
res = res - key;
|
21
|
+
END LOOP;
|
22
|
+
END IF;
|
23
|
+
|
24
|
+
RETURN res;
|
25
|
+
END;
|
26
|
+
$body$
|
27
|
+
LANGUAGE plpgsql;
|
@@ -0,0 +1,150 @@
|
|
1
|
+
-- version: 1
|
2
|
+
CREATE OR REPLACE FUNCTION logidze_logger() RETURNS TRIGGER AS $body$
|
3
|
+
DECLARE
|
4
|
+
changes jsonb;
|
5
|
+
version jsonb;
|
6
|
+
snapshot jsonb;
|
7
|
+
new_v integer;
|
8
|
+
size integer;
|
9
|
+
history_limit integer;
|
10
|
+
debounce_time integer;
|
11
|
+
current_version integer;
|
12
|
+
merged jsonb;
|
13
|
+
iterator integer;
|
14
|
+
item record;
|
15
|
+
columns text[];
|
16
|
+
include_columns boolean;
|
17
|
+
ts timestamp with time zone;
|
18
|
+
ts_column text;
|
19
|
+
BEGIN
|
20
|
+
ts_column := NULLIF(TG_ARGV[1], 'null');
|
21
|
+
columns := NULLIF(TG_ARGV[2], 'null');
|
22
|
+
include_columns := NULLIF(TG_ARGV[3], 'null');
|
23
|
+
|
24
|
+
IF TG_OP = 'INSERT' THEN
|
25
|
+
-- always exclude log_data column
|
26
|
+
changes := to_jsonb(NEW.*) - 'log_data';
|
27
|
+
|
28
|
+
IF columns IS NOT NULL THEN
|
29
|
+
snapshot = logidze_snapshot(changes, ts_column, columns, include_columns);
|
30
|
+
ELSE
|
31
|
+
snapshot = logidze_snapshot(changes, ts_column);
|
32
|
+
END IF;
|
33
|
+
|
34
|
+
IF snapshot#>>'{h, -1, c}' != '{}' THEN
|
35
|
+
NEW.log_data := snapshot;
|
36
|
+
END IF;
|
37
|
+
|
38
|
+
ELSIF TG_OP = 'UPDATE' THEN
|
39
|
+
|
40
|
+
IF OLD.log_data is NULL OR OLD.log_data = '{}'::jsonb THEN
|
41
|
+
-- always exclude log_data column
|
42
|
+
changes := to_jsonb(NEW.*) - 'log_data';
|
43
|
+
|
44
|
+
IF columns IS NOT NULL THEN
|
45
|
+
snapshot = logidze_snapshot(changes, ts_column, columns, include_columns);
|
46
|
+
ELSE
|
47
|
+
snapshot = logidze_snapshot(changes, ts_column);
|
48
|
+
END IF;
|
49
|
+
|
50
|
+
IF snapshot#>>'{h, -1, c}' != '{}' THEN
|
51
|
+
NEW.log_data := snapshot;
|
52
|
+
END IF;
|
53
|
+
RETURN NEW;
|
54
|
+
END IF;
|
55
|
+
|
56
|
+
history_limit := NULLIF(TG_ARGV[0], 'null');
|
57
|
+
debounce_time := NULLIF(TG_ARGV[4], 'null');
|
58
|
+
|
59
|
+
current_version := (NEW.log_data->>'v')::int;
|
60
|
+
|
61
|
+
IF ts_column IS NULL THEN
|
62
|
+
ts := statement_timestamp();
|
63
|
+
ELSE
|
64
|
+
ts := (to_jsonb(NEW.*)->>ts_column)::timestamp with time zone;
|
65
|
+
IF ts IS NULL OR ts = (to_jsonb(OLD.*)->>ts_column)::timestamp with time zone THEN
|
66
|
+
ts := statement_timestamp();
|
67
|
+
END IF;
|
68
|
+
END IF;
|
69
|
+
|
70
|
+
IF NEW = OLD THEN
|
71
|
+
RETURN NEW;
|
72
|
+
END IF;
|
73
|
+
|
74
|
+
IF current_version < (NEW.log_data#>>'{h,-1,v}')::int THEN
|
75
|
+
iterator := 0;
|
76
|
+
FOR item in SELECT * FROM jsonb_array_elements(NEW.log_data->'h')
|
77
|
+
LOOP
|
78
|
+
IF (item.value->>'v')::int > current_version THEN
|
79
|
+
NEW.log_data := jsonb_set(
|
80
|
+
NEW.log_data,
|
81
|
+
'{h}',
|
82
|
+
(NEW.log_data->'h') - iterator
|
83
|
+
);
|
84
|
+
END IF;
|
85
|
+
iterator := iterator + 1;
|
86
|
+
END LOOP;
|
87
|
+
END IF;
|
88
|
+
|
89
|
+
changes := '{}';
|
90
|
+
|
91
|
+
IF (coalesce(current_setting('logidze.full_snapshot', true), '') = 'on') THEN
|
92
|
+
changes = hstore_to_jsonb_loose(hstore(NEW.*));
|
93
|
+
ELSE
|
94
|
+
changes = hstore_to_jsonb_loose(
|
95
|
+
hstore(NEW.*) - hstore(OLD.*)
|
96
|
+
);
|
97
|
+
END IF;
|
98
|
+
|
99
|
+
changes = changes - 'log_data';
|
100
|
+
|
101
|
+
IF columns IS NOT NULL THEN
|
102
|
+
changes = logidze_filter_keys(changes, columns, include_columns);
|
103
|
+
END IF;
|
104
|
+
|
105
|
+
IF changes = '{}' THEN
|
106
|
+
RETURN NEW;
|
107
|
+
END IF;
|
108
|
+
|
109
|
+
new_v := (NEW.log_data#>>'{h,-1,v}')::int + 1;
|
110
|
+
|
111
|
+
size := jsonb_array_length(NEW.log_data->'h');
|
112
|
+
version := logidze_version(new_v, changes, ts);
|
113
|
+
|
114
|
+
IF (
|
115
|
+
debounce_time IS NOT NULL AND
|
116
|
+
(version->>'ts')::bigint - (NEW.log_data#>'{h,-1,ts}')::text::bigint <= debounce_time
|
117
|
+
) THEN
|
118
|
+
-- merge new version with the previous one
|
119
|
+
new_v := (NEW.log_data#>>'{h,-1,v}')::int;
|
120
|
+
version := logidze_version(new_v, (NEW.log_data#>'{h,-1,c}')::jsonb || changes, ts);
|
121
|
+
-- remove the previous version from log
|
122
|
+
NEW.log_data := jsonb_set(
|
123
|
+
NEW.log_data,
|
124
|
+
'{h}',
|
125
|
+
(NEW.log_data->'h') - (size - 1)
|
126
|
+
);
|
127
|
+
END IF;
|
128
|
+
|
129
|
+
NEW.log_data := jsonb_set(
|
130
|
+
NEW.log_data,
|
131
|
+
ARRAY['h', size::text],
|
132
|
+
version,
|
133
|
+
true
|
134
|
+
);
|
135
|
+
|
136
|
+
NEW.log_data := jsonb_set(
|
137
|
+
NEW.log_data,
|
138
|
+
'{v}',
|
139
|
+
to_jsonb(new_v)
|
140
|
+
);
|
141
|
+
|
142
|
+
IF history_limit IS NOT NULL AND history_limit <= size THEN
|
143
|
+
NEW.log_data := logidze_compact_history(NEW.log_data, size - history_limit + 1);
|
144
|
+
END IF;
|
145
|
+
END IF;
|
146
|
+
|
147
|
+
return NEW;
|
148
|
+
END;
|
149
|
+
$body$
|
150
|
+
LANGUAGE plpgsql;
|
@@ -0,0 +1,24 @@
|
|
1
|
+
-- version: 1
|
2
|
+
CREATE OR REPLACE FUNCTION logidze_snapshot(item jsonb, ts_column text DEFAULT NULL, columns text[] DEFAULT NULL, include_columns boolean DEFAULT false) RETURNS jsonb AS $body$
|
3
|
+
DECLARE
|
4
|
+
ts timestamp with time zone;
|
5
|
+
BEGIN
|
6
|
+
IF ts_column IS NULL THEN
|
7
|
+
ts := statement_timestamp();
|
8
|
+
ELSE
|
9
|
+
ts := coalesce((item->>ts_column)::timestamp with time zone, statement_timestamp());
|
10
|
+
END IF;
|
11
|
+
|
12
|
+
IF columns IS NOT NULL THEN
|
13
|
+
item := logidze_filter_keys(item, columns, include_columns);
|
14
|
+
END IF;
|
15
|
+
|
16
|
+
return json_build_object(
|
17
|
+
'v', 1,
|
18
|
+
'h', jsonb_build_array(
|
19
|
+
logidze_version(1, item, ts)
|
20
|
+
)
|
21
|
+
);
|
22
|
+
END;
|
23
|
+
$body$
|
24
|
+
LANGUAGE plpgsql;
|
@@ -0,0 +1,20 @@
|
|
1
|
+
-- version: 1
|
2
|
+
CREATE OR REPLACE FUNCTION logidze_version(v bigint, data jsonb, ts timestamp with time zone) RETURNS jsonb AS $body$
|
3
|
+
DECLARE
|
4
|
+
buf jsonb;
|
5
|
+
BEGIN
|
6
|
+
buf := jsonb_build_object(
|
7
|
+
'ts',
|
8
|
+
(extract(epoch from ts) * 1000)::bigint,
|
9
|
+
'v',
|
10
|
+
v,
|
11
|
+
'c',
|
12
|
+
data
|
13
|
+
);
|
14
|
+
IF coalesce(current_setting('logidze.meta', true), '') <> '' THEN
|
15
|
+
buf := jsonb_insert(buf, '{m}', current_setting('logidze.meta')::jsonb);
|
16
|
+
END IF;
|
17
|
+
RETURN buf;
|
18
|
+
END;
|
19
|
+
$body$
|
20
|
+
LANGUAGE plpgsql;
|
@@ -2,19 +2,29 @@
|
|
2
2
|
|
3
3
|
require "rails/generators"
|
4
4
|
require "rails/generators/active_record"
|
5
|
+
require_relative "../inject_sql"
|
6
|
+
require_relative "../fx_helper"
|
7
|
+
|
8
|
+
using RubyNext
|
5
9
|
|
6
10
|
module Logidze
|
7
11
|
module Generators
|
8
12
|
class InstallGenerator < ::Rails::Generators::Base # :nodoc:
|
9
13
|
include Rails::Generators::Migration
|
14
|
+
include InjectSql
|
15
|
+
include FxHelper
|
16
|
+
|
17
|
+
class FuncDef < Struct.new(:name, :version, :signature); end
|
10
18
|
|
11
19
|
source_root File.expand_path("templates", __dir__)
|
20
|
+
source_paths << File.expand_path("functions", __dir__)
|
12
21
|
|
13
22
|
class_option :update, type: :boolean, optional: true,
|
14
23
|
desc: "Define whether this is an update migration"
|
15
24
|
|
16
25
|
def generate_migration
|
17
|
-
migration_template "migration.rb.erb", "db/migrate/#{migration_name}.rb"
|
26
|
+
migration_template = fx? ? "migration_fx.rb.erb" : "migration.rb.erb"
|
27
|
+
migration_template migration_template, "db/migrate/#{migration_name}.rb"
|
18
28
|
end
|
19
29
|
|
20
30
|
def generate_hstore_migration
|
@@ -23,6 +33,16 @@ module Logidze
|
|
23
33
|
migration_template "hstore.rb.erb", "db/migrate/enable_hstore.rb"
|
24
34
|
end
|
25
35
|
|
36
|
+
def generate_fx_functions
|
37
|
+
return unless fx?
|
38
|
+
|
39
|
+
function_definitions.each do |fdef|
|
40
|
+
next if fdef.version == previous_version_for(fdef.name)
|
41
|
+
|
42
|
+
template "#{fdef.name}.sql", "db/functions/#{fdef.name}_v#{fdef.version.to_s.rjust(2, "0")}.sql"
|
43
|
+
end
|
44
|
+
end
|
45
|
+
|
26
46
|
no_tasks do
|
27
47
|
def migration_name
|
28
48
|
if update?
|
@@ -39,6 +59,43 @@ module Logidze
|
|
39
59
|
def update?
|
40
60
|
options[:update]
|
41
61
|
end
|
62
|
+
|
63
|
+
def previous_version_for(name)
|
64
|
+
all_functions.filter_map { |path| Regexp.last_match[1].to_i if path =~ %r{#{name}_v(\d+).sql} }.max
|
65
|
+
end
|
66
|
+
|
67
|
+
def all_functions
|
68
|
+
@all_functions ||=
|
69
|
+
begin
|
70
|
+
res = nil
|
71
|
+
in_root do
|
72
|
+
res = if File.directory?("db/functions")
|
73
|
+
Dir.entries("db/functions")
|
74
|
+
else
|
75
|
+
[]
|
76
|
+
end
|
77
|
+
end
|
78
|
+
res
|
79
|
+
end
|
80
|
+
end
|
81
|
+
|
82
|
+
def function_definitions
|
83
|
+
@function_definitions ||=
|
84
|
+
begin
|
85
|
+
Dir.glob(File.join(__dir__, "functions", "*.sql")).map do |path|
|
86
|
+
name = path.match(/([^\/]+)\.sql/)[1]
|
87
|
+
|
88
|
+
file = File.open(path)
|
89
|
+
header = file.readline
|
90
|
+
|
91
|
+
version = header.match(/version:\s+(\d+)/)[1].to_i
|
92
|
+
parameters = file.readline.match(/CREATE OR REPLACE FUNCTION\s+[\w_]+\((.*)\)/)[1]
|
93
|
+
signature = parameters.split(/\s*,\s*/).map { |param| param.split(/\s+/, 2).last.sub(/\s+DEFAULT .*$/, "") }.join(", ")
|
94
|
+
|
95
|
+
FuncDef.new(name, version, signature)
|
96
|
+
end
|
97
|
+
end
|
98
|
+
end
|
42
99
|
end
|
43
100
|
|
44
101
|
def self.next_migration_number(dir)
|
@@ -1,246 +1,33 @@
|
|
1
|
-
class <%= @migration_class_name %> < ActiveRecord::Migration
|
2
|
-
require 'logidze/migration'
|
3
|
-
include Logidze::Migration
|
4
|
-
|
1
|
+
class <%= @migration_class_name %> < ActiveRecord::Migration[5.0]
|
5
2
|
def up
|
6
|
-
|
7
|
-
|
8
|
-
|
9
|
-
BEGIN
|
10
|
-
EXECUTE 'ALTER DATABASE ' || quote_ident(current_database()) || ' SET logidze.disabled=' || quote_literal('');
|
11
|
-
EXECUTE 'ALTER DATABASE ' || quote_ident(current_database()) || ' SET logidze.meta=' || quote_literal('');
|
12
|
-
END;
|
13
|
-
$$
|
14
|
-
LANGUAGE plpgsql;
|
15
|
-
SQL
|
16
|
-
end
|
17
|
-
|
18
|
-
<% if update? %>
|
19
|
-
execute <<-SQL
|
3
|
+
<%- if update? -%>
|
4
|
+
# Drop legacy functions (<1.0)
|
5
|
+
execute <<~SQL
|
20
6
|
DROP FUNCTION IF EXISTS logidze_version(bigint, jsonb);
|
21
7
|
DROP FUNCTION IF EXISTS logidze_snapshot(jsonb);
|
22
8
|
DROP FUNCTION IF EXISTS logidze_version(bigint, jsonb, text[]);
|
23
9
|
DROP FUNCTION IF EXISTS logidze_snapshot(jsonb, text[]);
|
10
|
+
DROP FUNCTION IF EXISTS logidze_version(bigint, jsonb, timestamp with time zone, text[]);
|
11
|
+
DROP FUNCTION IF EXISTS logidze_snapshot(jsonb, text, text[]);
|
12
|
+
DROP FUNCTION IF EXISTS logidze_exclude_keys(jsonb, VARIADIC text[]);
|
13
|
+
DROP FUNCTION IF EXISTS logidze_compact_history(jsonb);
|
24
14
|
SQL
|
25
|
-
<% end %>
|
26
|
-
|
27
|
-
execute <<-SQL
|
28
|
-
CREATE OR REPLACE FUNCTION logidze_version(v bigint, data jsonb, ts timestamp with time zone, blacklist text[] DEFAULT '{}') RETURNS jsonb AS $body$
|
29
|
-
DECLARE
|
30
|
-
buf jsonb;
|
31
|
-
BEGIN
|
32
|
-
buf := jsonb_build_object(
|
33
|
-
'ts',
|
34
|
-
(extract(epoch from ts) * 1000)::bigint,
|
35
|
-
'v',
|
36
|
-
v,
|
37
|
-
'c',
|
38
|
-
logidze_exclude_keys(data, VARIADIC array_append(blacklist, 'log_data'))
|
39
|
-
);
|
40
|
-
IF coalesce(#{current_setting('logidze.meta')}, '') <> '' THEN
|
41
|
-
buf := jsonb_set(buf, ARRAY['m'], current_setting('logidze.meta')::jsonb);
|
42
|
-
END IF;
|
43
|
-
RETURN buf;
|
44
|
-
END;
|
45
|
-
$body$
|
46
|
-
LANGUAGE plpgsql;
|
47
|
-
|
48
|
-
CREATE OR REPLACE FUNCTION logidze_snapshot(item jsonb, ts_column text, blacklist text[] DEFAULT '{}') RETURNS jsonb AS $body$
|
49
|
-
DECLARE
|
50
|
-
ts timestamp with time zone;
|
51
|
-
BEGIN
|
52
|
-
IF ts_column IS NULL THEN
|
53
|
-
ts := statement_timestamp();
|
54
|
-
ELSE
|
55
|
-
ts := coalesce((item->>ts_column)::timestamp with time zone, statement_timestamp());
|
56
|
-
END IF;
|
57
|
-
return json_build_object(
|
58
|
-
'v', 1,
|
59
|
-
'h', jsonb_build_array(
|
60
|
-
logidze_version(1, item, ts, blacklist)
|
61
|
-
)
|
62
|
-
);
|
63
|
-
END;
|
64
|
-
$body$
|
65
|
-
LANGUAGE plpgsql;
|
66
|
-
|
67
|
-
CREATE OR REPLACE FUNCTION logidze_exclude_keys(obj jsonb, VARIADIC keys text[]) RETURNS jsonb AS $body$
|
68
|
-
DECLARE
|
69
|
-
res jsonb;
|
70
|
-
key text;
|
71
|
-
BEGIN
|
72
|
-
res := obj;
|
73
|
-
FOREACH key IN ARRAY keys
|
74
|
-
LOOP
|
75
|
-
res := res - key;
|
76
|
-
END LOOP;
|
77
|
-
RETURN res;
|
78
|
-
END;
|
79
|
-
$body$
|
80
|
-
LANGUAGE plpgsql;
|
81
|
-
|
82
|
-
CREATE OR REPLACE FUNCTION logidze_compact_history(log_data jsonb) RETURNS jsonb AS $body$
|
83
|
-
DECLARE
|
84
|
-
merged jsonb;
|
85
|
-
BEGIN
|
86
|
-
merged := jsonb_build_object(
|
87
|
-
'ts',
|
88
|
-
log_data#>'{h,1,ts}',
|
89
|
-
'v',
|
90
|
-
log_data#>'{h,1,v}',
|
91
|
-
'c',
|
92
|
-
(log_data#>'{h,0,c}') || (log_data#>'{h,1,c}')
|
93
|
-
);
|
94
|
-
|
95
|
-
IF (log_data#>'{h,1}' ? 'm') THEN
|
96
|
-
merged := jsonb_set(merged, ARRAY['m'], log_data#>'{h,1,m}');
|
97
|
-
END IF;
|
98
|
-
|
99
|
-
return jsonb_set(
|
100
|
-
log_data,
|
101
|
-
'{h}',
|
102
|
-
jsonb_set(
|
103
|
-
log_data->'h',
|
104
|
-
'{1}',
|
105
|
-
merged
|
106
|
-
) - 0
|
107
|
-
);
|
108
|
-
END;
|
109
|
-
$body$
|
110
|
-
LANGUAGE plpgsql;
|
111
|
-
|
112
|
-
CREATE OR REPLACE FUNCTION logidze_logger() RETURNS TRIGGER AS $body$
|
113
|
-
DECLARE
|
114
|
-
changes jsonb;
|
115
|
-
version jsonb;
|
116
|
-
snapshot jsonb;
|
117
|
-
new_v integer;
|
118
|
-
size integer;
|
119
|
-
history_limit integer;
|
120
|
-
debounce_time integer;
|
121
|
-
current_version integer;
|
122
|
-
merged jsonb;
|
123
|
-
iterator integer;
|
124
|
-
item record;
|
125
|
-
columns_blacklist text[];
|
126
|
-
ts timestamp with time zone;
|
127
|
-
ts_column text;
|
128
|
-
BEGIN
|
129
|
-
ts_column := NULLIF(TG_ARGV[1], 'null');
|
130
|
-
columns_blacklist := COALESCE(NULLIF(TG_ARGV[2], 'null'), '{}');
|
131
|
-
|
132
|
-
IF TG_OP = 'INSERT' THEN
|
133
|
-
snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column, columns_blacklist);
|
134
|
-
|
135
|
-
IF snapshot#>>'{h, -1, c}' != '{}' THEN
|
136
|
-
NEW.log_data := snapshot;
|
137
|
-
END IF;
|
138
|
-
|
139
|
-
ELSIF TG_OP = 'UPDATE' THEN
|
140
|
-
|
141
|
-
IF OLD.log_data is NULL OR OLD.log_data = '{}'::jsonb THEN
|
142
|
-
snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column, columns_blacklist);
|
143
|
-
IF snapshot#>>'{h, -1, c}' != '{}' THEN
|
144
|
-
NEW.log_data := snapshot;
|
145
|
-
END IF;
|
146
|
-
RETURN NEW;
|
147
|
-
END IF;
|
148
|
-
|
149
|
-
history_limit := NULLIF(TG_ARGV[0], 'null');
|
150
|
-
debounce_time := NULLIF(TG_ARGV[3], 'null');
|
151
|
-
|
152
|
-
current_version := (NEW.log_data->>'v')::int;
|
153
|
-
|
154
|
-
IF ts_column IS NULL THEN
|
155
|
-
ts := statement_timestamp();
|
156
|
-
ELSE
|
157
|
-
ts := (to_jsonb(NEW.*)->>ts_column)::timestamp with time zone;
|
158
|
-
IF ts IS NULL OR ts = (to_jsonb(OLD.*)->>ts_column)::timestamp with time zone THEN
|
159
|
-
ts := statement_timestamp();
|
160
|
-
END IF;
|
161
|
-
END IF;
|
162
|
-
|
163
|
-
IF NEW = OLD THEN
|
164
|
-
RETURN NEW;
|
165
|
-
END IF;
|
166
|
-
|
167
|
-
IF current_version < (NEW.log_data#>>'{h,-1,v}')::int THEN
|
168
|
-
iterator := 0;
|
169
|
-
FOR item in SELECT * FROM jsonb_array_elements(NEW.log_data->'h')
|
170
|
-
LOOP
|
171
|
-
IF (item.value->>'v')::int > current_version THEN
|
172
|
-
NEW.log_data := jsonb_set(
|
173
|
-
NEW.log_data,
|
174
|
-
'{h}',
|
175
|
-
(NEW.log_data->'h') - iterator
|
176
|
-
);
|
177
|
-
END IF;
|
178
|
-
iterator := iterator + 1;
|
179
|
-
END LOOP;
|
180
|
-
END IF;
|
181
|
-
|
182
|
-
changes := hstore_to_jsonb_loose(
|
183
|
-
hstore(NEW.*) - hstore(OLD.*)
|
184
|
-
);
|
185
|
-
|
186
|
-
new_v := (NEW.log_data#>>'{h,-1,v}')::int + 1;
|
187
|
-
|
188
|
-
size := jsonb_array_length(NEW.log_data->'h');
|
189
|
-
version := logidze_version(new_v, changes, ts, columns_blacklist);
|
190
|
-
|
191
|
-
IF version->>'c' = '{}' THEN
|
192
|
-
RETURN NEW;
|
193
|
-
END IF;
|
194
|
-
|
195
|
-
IF (
|
196
|
-
debounce_time IS NOT NULL AND
|
197
|
-
(version->>'ts')::bigint - (NEW.log_data#>'{h,-1,ts}')::text::bigint <= debounce_time
|
198
|
-
) THEN
|
199
|
-
-- merge new version with the previous one
|
200
|
-
new_v := (NEW.log_data#>>'{h,-1,v}')::int;
|
201
|
-
version := logidze_version(new_v, (NEW.log_data#>'{h,-1,c}')::jsonb || changes, ts, columns_blacklist);
|
202
|
-
-- remove the previous version from log
|
203
|
-
NEW.log_data := jsonb_set(
|
204
|
-
NEW.log_data,
|
205
|
-
'{h}',
|
206
|
-
(NEW.log_data->'h') - (size - 1)
|
207
|
-
);
|
208
|
-
END IF;
|
209
|
-
|
210
|
-
NEW.log_data := jsonb_set(
|
211
|
-
NEW.log_data,
|
212
|
-
ARRAY['h', size::text],
|
213
|
-
version,
|
214
|
-
true
|
215
|
-
);
|
216
|
-
|
217
|
-
NEW.log_data := jsonb_set(
|
218
|
-
NEW.log_data,
|
219
|
-
'{v}',
|
220
|
-
to_jsonb(new_v)
|
221
|
-
);
|
222
|
-
|
223
|
-
IF history_limit IS NOT NULL AND history_limit = size THEN
|
224
|
-
NEW.log_data := logidze_compact_history(NEW.log_data);
|
225
|
-
END IF;
|
226
|
-
END IF;
|
227
15
|
|
228
|
-
|
229
|
-
|
230
|
-
|
231
|
-
|
16
|
+
<%- end -%>
|
17
|
+
execute <<~SQL
|
18
|
+
<%- function_definitions.each do |f| -%>
|
19
|
+
<%= inject_sql("#{f.name}.sql", indent: 6) %>
|
20
|
+
<%- end -%>
|
232
21
|
SQL
|
233
22
|
end
|
234
23
|
|
235
24
|
def down
|
236
|
-
|
237
|
-
execute <<-SQL
|
238
|
-
|
239
|
-
DROP FUNCTION logidze_version(bigint, jsonb, timestamp with time zone, text[]) CASCADE;
|
240
|
-
|
241
|
-
DROP FUNCTION logidze_snapshot(jsonb, text, text[]) CASCADE;
|
242
|
-
DROP FUNCTION logidze_logger() CASCADE;
|
25
|
+
<%- unless update? -%>
|
26
|
+
execute <<~SQL
|
27
|
+
<%- function_definitions.each do |f| -%>
|
28
|
+
DROP FUNCTION IF EXISTS <%= f.name %>(<%= f.signature %>) CASCADE;
|
29
|
+
<%- end -%>
|
243
30
|
SQL
|
244
|
-
|
31
|
+
<%- end -%>
|
245
32
|
end
|
246
33
|
end
|