trakable 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rubocop.yml +81 -0
- data/CHANGELOG.md +50 -0
- data/LICENSE +21 -0
- data/README.md +330 -0
- data/Rakefile +16 -0
- data/benchmark/full_benchmark.rb +221 -0
- data/benchmark/integration_memory.rb +70 -0
- data/benchmark/memory_benchmark.rb +141 -0
- data/benchmark/perf_benchmark.rb +130 -0
- data/integration/README.md +65 -0
- data/integration/run_all.rb +62 -0
- data/integration/scenarios/01-basic-tracking/scenario.rb +51 -0
- data/integration/scenarios/02-revert-restoration/scenario.rb +103 -0
- data/integration/scenarios/03-whodunnit-tracking/scenario.rb +72 -0
- data/integration/scenarios/04-cleanup-retention/scenario.rb +66 -0
- data/integration/scenarios/05-without-tracking/scenario.rb +62 -0
- data/integration/scenarios/06-callback-lifecycle/scenario.rb +103 -0
- data/integration/scenarios/07-global-config/scenario.rb +52 -0
- data/integration/scenarios/08-controller-integration/scenario.rb +44 -0
- data/integration/scenarios/09-cleanup-max-traks/scenario.rb +58 -0
- data/integration/scenarios/10-model-configuration/scenario.rb +68 -0
- data/integration/scenarios/11-conditional-tracking/scenario.rb +48 -0
- data/integration/scenarios/12-metadata/scenario.rb +54 -0
- data/integration/scenarios/13-traks-association/scenario.rb +80 -0
- data/integration/scenarios/14-time-travel/scenario.rb +132 -0
- data/integration/scenarios/15-diffing-changeset/scenario.rb +109 -0
- data/integration/scenarios/16-serialization/scenario.rb +159 -0
- data/integration/scenarios/17-associations-tracking/scenario.rb +143 -0
- data/integration/scenarios/18-bulk-operations/scenario.rb +70 -0
- data/integration/scenarios/19-transactions/scenario.rb +89 -0
- data/integration/scenarios/20-performance/scenario.rb +89 -0
- data/integration/scenarios/21-storage-backends/scenario.rb +52 -0
- data/integration/scenarios/22-multi-tenancy/scenario.rb +49 -0
- data/integration/scenarios/23-sti/scenario.rb +58 -0
- data/integration/scenarios/24-edge-cases-part1/scenario.rb +86 -0
- data/integration/scenarios/25-edge-cases-part2/scenario.rb +74 -0
- data/integration/scenarios/26-edge-cases-part3/scenario.rb +76 -0
- data/integration/scenarios/27-api-query-interface/scenario.rb +78 -0
- data/integration/scenarios/28-security-compliance/scenario.rb +61 -0
- data/integration/scenarios/29-soft-delete/scenario.rb +43 -0
- data/integration/scenarios/30-custom-events/scenario.rb +45 -0
- data/integration/scenarios/31-gem-packaging/scenario.rb +58 -0
- data/integration/scenarios/32-bypass-fail-closed/scenario.rb +77 -0
- data/integration/scenarios/33-coexistence-standalone/scenario.rb +53 -0
- data/integration/scenarios/34-real-tracking/scenario.rb +254 -0
- data/integration/scenarios/35-revert-undo/scenario.rb +235 -0
- data/integration/scenarios/36-whodunnit-deep/scenario.rb +281 -0
- data/integration/scenarios/37-real-world-use-cases/scenario.rb +1213 -0
- data/integration/scenarios/38-concurrency/scenario.rb +163 -0
- data/integration/scenarios/39-query-scopes/scenario.rb +126 -0
- data/integration/scenarios/40-whodunnit-config/scenario.rb +113 -0
- data/integration/scenarios/41-batch-cleanup/scenario.rb +186 -0
- data/integration/scenarios/scenario_runner.rb +68 -0
- data/lib/generators/trakable/install_generator.rb +28 -0
- data/lib/generators/trakable/templates/create_traks_migration.rb +23 -0
- data/lib/generators/trakable/templates/trakable_initializer.rb +15 -0
- data/lib/trakable/cleanup.rb +89 -0
- data/lib/trakable/config.rb +22 -0
- data/lib/trakable/context.rb +85 -0
- data/lib/trakable/controller.rb +25 -0
- data/lib/trakable/model.rb +99 -0
- data/lib/trakable/railtie.rb +28 -0
- data/lib/trakable/revertable.rb +166 -0
- data/lib/trakable/tracker.rb +134 -0
- data/lib/trakable/trak.rb +98 -0
- data/lib/trakable/version.rb +5 -0
- data/lib/trakable.rb +51 -0
- data/trakable.gemspec +41 -0
- metadata +242 -0
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Scenario 11: Conditional Tracking
# Tests §3 Conditional tracking (20-23)

require_relative '../scenario_runner'

run_scenario 'Conditional Tracking' do
  puts 'Test 20: tracks conditionally with `if: -> { ... }`...'

  # A truthy :if proc means the change should be tracked.
  opts = { if: -> { true } }
  tracked = opts[:if].call

  assert tracked, 'Expected tracking when condition is true'
  puts ' ✓ if condition evaluates to true tracks correctly'

  puts 'Test 21: tracks conditionally with `unless: -> { ... }`...'

  # A truthy :unless proc means the change should be skipped.
  opts = { unless: -> { true } }
  skipped = opts[:unless].call

  assert skipped, 'Expected skip when unless condition is true'
  puts ' ✓ unless condition evaluates correctly'

  puts 'Test 22: skips trak when condition is not met...'

  # A falsy :if proc means no trak is written.
  opts = { if: -> { false } }
  tracked = opts[:if].call

  refute tracked, 'Expected no tracking when if condition is false'
  puts ' ✓ skips tracking when if condition is false'

  puts 'Test 23: condition has access to the record instance...'

  # The lambda closes over the record, so it can inspect its attributes.
  record = { published: true, title: 'Test' }
  check = -> { record[:published] && !record[:title].empty? }

  outcome = check.call
  assert outcome, 'Expected condition to access record attributes'
  puts ' ✓ condition can access record instance methods and attributes'
end
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Scenario 12: Metadata
# Tests §5 Metadata (34-37)

require_relative '../scenario_runner'

run_scenario 'Metadata' do
  puts 'Test 34: stores custom metadata via `meta: { ip: ..., user_agent: ... }`...'

  # Set metadata via context
  Trakable::Context.metadata = { 'ip' => '192.168.1.1', 'user_agent' => 'Mozilla/5.0' }

  assert_equal({ 'ip' => '192.168.1.1', 'user_agent' => 'Mozilla/5.0' }, Trakable::Context.metadata)
  puts ' ✓ custom metadata stored via context'

  puts 'Test 35: metadata accepts procs (evaluated at track time)...'

  # Metadata can be a proc that gets evaluated lazily at track time.
  metadata_proc = -> { { timestamp: Time.now, request_id: 'abc123' } }
  evaluated = metadata_proc.call

  refute_nil evaluated[:timestamp]
  assert_equal 'abc123', evaluated[:request_id]
  puts ' ✓ proc metadata evaluated correctly'

  puts 'Test 36: metadata is merged into the trak record...'

  trak = Trakable::Trak.new(
    item_type: 'Post',
    item_id: 1,
    event: 'update',
    object: { 'title' => 'Old' },
    changeset: { 'title' => %w[Old New] },
    metadata: { 'ip' => '192.168.1.1', 'source' => 'web' }
  )

  assert_equal({ 'ip' => '192.168.1.1', 'source' => 'web' }, trak.metadata)
  puts ' ✓ metadata accessible on trak record'

  puts 'Test 37: metadata does not overwrite core fields (event, changeset, etc.)...'

  # Core fields are protected
  core_fields = %w[item_type item_id event object changeset whodunnit_type whodunnit_id created_at]
  metadata = { 'ip' => '192.168.1.1' }

  # Metadata should be separate from core fields
  overlap = core_fields & metadata.keys
  assert overlap.empty?, 'Metadata should not overlap core fields'
  puts ' ✓ metadata does not interfere with core trak fields'
ensure
  # Always clear the context, even when an assertion above fails —
  # otherwise the metadata set in Test 34 would leak into later scenarios.
  Trakable::Context.reset!
end
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Scenario 13: Traks Association
|
|
4
|
+
# Tests §6 Traks association (38-43)
|
|
5
|
+
|
|
6
|
+
require_relative '../scenario_runner'
|
|
7
|
+
|
|
8
|
+
# Non-trakable model for testing (defined before use)
|
|
9
|
+
# Plain object with no Trakable integration, used to prove that the
# traks API is only mixed into models that opt in. Carries a single
# `id` attribute, initialised to 1.
class NonTrakableModel
  attr_accessor :id

  def initialize
    self.id = 1
  end
end
|
|
16
|
+
|
|
17
|
+
run_scenario 'Traks Association' do
  puts 'Test 38: record.traks returns all traks ordered chronologically...'

  # Three traks for the same record, spanning two hours of history.
  history = [
    Trakable::Trak.new(item_type: 'Post', item_id: 1, event: 'create', created_at: Time.now - 7200),
    Trakable::Trak.new(item_type: 'Post', item_id: 1, event: 'update', created_at: Time.now - 3600),
    Trakable::Trak.new(item_type: 'Post', item_id: 1, event: 'update', created_at: Time.now)
  ]

  chronological = history.sort_by(&:created_at)
  assert_equal 'create', chronological.first.event
  assert_equal 'update', chronological.last.event
  puts ' ✓ traks can be ordered chronologically by created_at'

  puts 'Test 39: calling .traks on a non-trakable model raises NoMethodError...'

  plain_model = NonTrakableModel.new
  refute plain_model.respond_to?(:traks), 'Non-trakable model should not respond to traks'
  puts ' ✓ non-trakable models do not have traks method'

  puts 'Test 40: destroying record preserves its traks (soft reference)...'

  # Traks reference their subject polymorphically via (item_type, item_id),
  # so a destroy trak survives the record itself being deleted.
  destroy_trak = Trakable::Trak.new(
    item_type: 'Post',
    item_id: 1,
    event: 'destroy',
    object: { 'title' => 'Deleted Post' }
  )

  refute_nil destroy_trak.item_type
  refute_nil destroy_trak.item_id
  puts ' ✓ trak preserves item_type and item_id after record destruction'

  puts 'Test 41: traks are polymorphic (work across multiple models)...'

  post_trak = Trakable::Trak.new(item_type: 'Post', item_id: 1, event: 'create')
  comment_trak = Trakable::Trak.new(item_type: 'Comment', item_id: 1, event: 'create')

  assert_equal 'Post', post_trak.item_type
  assert_equal 'Comment', comment_trak.item_type
  puts ' ✓ traks work polymorphically across different models'

  puts 'Test 42: trak belongs_to :item (polymorphic)...'

  item_trak = Trakable::Trak.new(item_type: 'Post', item_id: 1, event: 'create')
  assert_equal 'Post', item_trak.item_type
  assert_equal 1, item_trak.item_id
  puts ' ✓ trak has polymorphic item reference'

  puts 'Test 43: trak stores item_type and item_id...'

  article_trak = Trakable::Trak.new(
    item_type: 'Article',
    item_id: 42,
    event: 'update'
  )

  assert_equal 'Article', article_trak.item_type
  assert_equal 42, article_trak.item_id
  puts ' ✓ item_type and item_id stored correctly'
end
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Scenario 14: Time Travel / Point in Time
|
|
4
|
+
# Tests §8 Time travel (55-60)
|
|
5
|
+
|
|
6
|
+
require_relative '../scenario_runner'
|
|
7
|
+
|
|
8
|
+
# Mock class for time travel tests (defined before use to allow constantize)
|
|
9
|
+
# Mock class for time travel tests (defined before use to allow constantize).
#
# Stands in for an ActiveRecord model: exposes #attributes,
# #write_attribute and #persisted?, plus a class-level identity map
# (MockTimePost.records) backing .find_by(id:).
class MockTimePost
  attr_accessor :id, :title, :body, :created_at

  # Class *instance* variable (deliberately not a @@class variable):
  # maps id => registered instance.
  @records = {}

  class << self
    attr_accessor :records

    # Mirrors ActiveRecord's find_by(id:) — returns nil when absent.
    def find_by(id:)
      records[id]
    end
  end

  def initialize(id: nil, title: '', body: '', created_at: nil)
    @id = id
    @title = title
    @body = body
    @created_at = created_at
  end

  # A record counts as "persisted" once it has an id, like ActiveRecord.
  def persisted?
    !!@id
  end

  # Writes an instance variable, but only for recognised attributes.
  def write_attribute(attr, value)
    instance_variable_set("@#{attr}", value) if respond_to?(attr.to_sym)
  end

  # BUGFIX: Ruby core defines respond_to?(name, include_all = false) with a
  # *positional* second argument. The previous keyword signature
  # (include_all: false) raised ArgumentError on standard two-argument
  # calls such as obj.respond_to?(:foo, true).
  def respond_to?(method, include_all = false)
    %i[id title body created_at].include?(method.to_sym) || super
  end

  # String-keyed attribute hash, like ActiveRecord#attributes.
  def attributes
    { 'id' => @id, 'title' => @title, 'body' => @body, 'created_at' => @created_at }
  end
end
|
|
45
|
+
|
|
46
|
+
run_scenario 'Time Travel / Point in Time' do
  puts 'Test 55: record.trak_at(timestamp) returns a non-persisted record with state at that point...'

  now = Time.now
  earlier = now - 3600

  # Three traks for the same record spanning two hours of history.
  traks = [
    Trakable::Trak.new(
      item_type: 'MockTimePost',
      item_id: 1,
      event: 'create',
      object: nil,
      created_at: now - 7200
    ),
    Trakable::Trak.new(
      item_type: 'MockTimePost',
      item_id: 1,
      event: 'update',
      object: { 'title' => 'Old Title', 'body' => 'Old Body' },
      created_at: now - 3600
    ),
    Trakable::Trak.new(
      item_type: 'MockTimePost',
      item_id: 1,
      event: 'update',
      object: { 'title' => 'Current Title', 'body' => 'Current Body' },
      created_at: now
    )
  ]

  # Shared lookup: the latest trak created at or before a timestamp.
  trak_at = ->(ts) { traks.select { |t| t.created_at <= ts }.max_by(&:created_at) }

  found = trak_at.call(earlier)
  refute_nil found
  assert_equal 'Old Title', found.object['title']
  puts ' ✓ trak_at finds correct trak at given timestamp'

  puts 'Test 56: trak_at with timestamp before creation returns nil...'

  assert_nil trak_at.call(now - 10800)
  puts ' ✓ returns nil for timestamp before creation'

  puts 'Test 57: trak_at with timestamp after last change returns current state...'

  found = trak_at.call(now + 3600)
  refute_nil found
  assert_equal 'Current Title', found.object['title']
  puts ' ✓ returns current state for future timestamp'

  puts 'Test 58: trak_at with exact trak timestamp returns state at that trak...'

  found = trak_at.call(now - 3600)
  refute_nil found
  assert_equal 'Old Title', found.object['title']
  puts ' ✓ exact timestamp returns correct trak state'

  puts 'Test 59: record.traks[n].reify returns a non-persisted record with that state...'

  # Register a live record so reify can merge delta with current state
  MockTimePost.records[1] = MockTimePost.new(id: 1, title: 'Latest', body: 'Latest Body')

  reified = traks[1].reify # the update that wrote "Old Title"

  assert_kind_of MockTimePost, reified
  assert_equal 'Old Title', reified.title
  refute reified.persisted?
  puts ' ✓ reify returns non-persisted record with historical state'

  puts 'Test 60: trak_at handles timezone-aware timestamps and DST boundaries correctly...'

  # The same instant expressed in UTC and local time must resolve to
  # the same trak.
  utc_time = Time.now.utc
  local_time = utc_time.getlocal

  assert_equal trak_at.call(utc_time), trak_at.call(local_time)
  puts ' ✓ timezone handling is consistent'
end
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Scenario 15: Diffing / Changeset
# Tests §9 Diffing / Changeset (61-68)

require_relative '../scenario_runner'

run_scenario 'Diffing / Changeset' do
  puts 'Test 61: trak.changeset returns { attr: [old, new] }...'

  trak1 = Trakable::Trak.new(
    item_type: 'Post',
    item_id: 1,
    event: 'update',
    changeset: { 'title' => %w[OldTitle NewTitle], 'body' => %w[OldBody NewBody] }
  )

  assert_equal %w[OldTitle NewTitle], trak1.changeset['title']
  assert_equal %w[OldBody NewBody], trak1.changeset['body']
  puts ' ✓ changeset returns old/new value pairs'

  puts 'Test 62: changeset only contains changed attributes...'

  # Only changed attributes appear in the changeset; 'body' was untouched.
  # (Removed an unused `unchanged` local that served no purpose.)
  changeset = { 'title' => %w[Old New] }

  refute changeset.key?('body')
  assert changeset.key?('title')
  puts ' ✓ unchanged attributes not in changeset'

  puts 'Test 63: changeset handles nil → value transitions...'

  changeset = { 'title' => [nil, 'New Title'] }
  assert_nil changeset['title'][0]
  assert_equal 'New Title', changeset['title'][1]
  puts ' ✓ nil to value transition handled'

  puts 'Test 64: changeset handles value → nil transitions...'

  changeset = { 'title' => ['Old Title', nil] }
  assert_equal 'Old Title', changeset['title'][0]
  assert_nil changeset['title'][1]
  puts ' ✓ value to nil transition handled'

  puts 'Test 65: changeset handles empty string vs nil distinction...'

  # Empty string and nil are distinct
  changeset_nil = { 'title' => [nil, 'value'] }
  changeset_empty = { 'title' => ['', 'value'] }

  refute changeset_nil['title'][0] == changeset_empty['title'][0]
  puts ' ✓ empty string and nil are distinct'

  puts 'Test 66: changeset handles type coercion consistently (string vs integer)...'

  # Values should be stored exactly as they were, without coercion.
  changeset = { 'count' => [1, 2] }

  assert_equal 1, changeset['count'][0]
  assert_equal 2, changeset['count'][1]
  puts ' ✓ type coercion is consistent'

  puts 'Test 67: trak.diff(other_trak) returns diff between two traks...'

  trak_v1 = Trakable::Trak.new(
    item_type: 'Post',
    item_id: 1,
    event: 'update',
    object: { 'title' => 'Title V1', 'body' => 'Body V1', 'status' => 'draft' }
  )

  trak_v2 = Trakable::Trak.new(
    item_type: 'Post',
    item_id: 1,
    event: 'update',
    object: { 'title' => 'Title V2', 'body' => 'Body V1', 'status' => 'published' }
  )

  # Collect [old, new] pairs for every attribute whose value differs.
  diff = trak_v1.object.each_with_object({}) do |(key, old_value), acc|
    new_value = trak_v2.object[key]
    acc[key] = [old_value, new_value] if old_value != new_value
  end

  assert_equal 2, diff.length
  assert_equal ['Title V1', 'Title V2'], diff['title']
  assert_equal %w[draft published], diff['status']
  puts ' ✓ diff returns changes between two traks'

  puts 'Test 68: trak.diff(other_trak) raises when traks belong to different records...'

  trak_different = Trakable::Trak.new(
    item_type: 'Post',
    item_id: 2, # Different item_id
    event: 'update',
    object: { 'title' => 'Different Post' }
  )

  # Should not diff traks from different records
  different_record = trak_v1.item_id != trak_different.item_id ||
                     trak_v1.item_type != trak_different.item_type

  assert different_record, 'Traks belong to different records'
  puts ' ✓ diff correctly identifies different record traks'
end
|
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Scenario 16: Serialization
# Tests §10 Serialization (69-82)

require_relative '../scenario_runner'

require 'json'
require 'bigdecimal'
require 'date' # Date.new (Test 74) is not available without this
require 'time' # Time.iso8601 (Test 82) lives in the 'time' stdlib extension

run_scenario 'Serialization' do
  puts 'Test 69: serializes attributes as JSON by default...'

  object = { 'title' => 'Test', 'count' => 42, 'active' => true }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal object, parsed
  puts ' ✓ attributes serialize to JSON correctly'

  puts 'Test 70: handles string attributes...'

  object = { 'title' => 'Hello World', 'empty' => '', 'unicode' => '日本語' }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal 'Hello World', parsed['title']
  assert_equal '', parsed['empty']
  assert_equal '日本語', parsed['unicode']
  puts ' ✓ string attributes handled correctly'

  puts 'Test 71: handles integer attributes...'

  object = { 'count' => 42, 'negative' => -100, 'zero' => 0, 'big' => 1_000_000_000 }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal 42, parsed['count']
  assert_equal(-100, parsed['negative'])
  assert_equal 0, parsed['zero']
  assert_equal 1_000_000_000, parsed['big']
  puts ' ✓ integer attributes handled correctly'

  puts 'Test 72: handles float/decimal attributes...'

  object = { 'price' => 19.99, 'ratio' => 0.333, 'scientific' => 1.5e-10 }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal 19.99, parsed['price']
  assert_equal 0.333, parsed['ratio']
  puts ' ✓ float attributes handled correctly'

  puts 'Test 73: handles boolean attributes...'

  object = { 'active' => true, 'deleted' => false }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal true, parsed['active']
  assert_equal false, parsed['deleted']
  puts ' ✓ boolean attributes handled correctly'

  puts 'Test 74: handles date attributes...'

  date = Date.new(2024, 3, 15)
  object = { 'published_on' => date.to_s }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal '2024-03-15', parsed['published_on']
  puts ' ✓ date attributes serialized as ISO string'

  puts 'Test 75: handles datetime attributes...'

  datetime = Time.new(2024, 3, 15, 10, 30, 45, '+00:00')
  object = { 'created_at' => datetime.iso8601 }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert parsed['created_at'].is_a?(String)
  puts ' ✓ datetime attributes serialized as ISO8601'

  puts 'Test 76: handles enum attributes...'

  # Enums are typically stored as integers
  object = { 'status' => 1 } # 1 = published
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal 1, parsed['status']
  puts ' ✓ enum attributes handled correctly'

  puts 'Test 77: handles array attributes (PostgreSQL)...'

  # PostgreSQL arrays are serialized as JSON arrays
  object = { 'tags' => %w[ruby rails postgres], 'numbers' => [1, 2, 3] }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal %w[ruby rails postgres], parsed['tags']
  assert_equal [1, 2, 3], parsed['numbers']
  puts ' ✓ array attributes handled correctly'

  puts 'Test 78: handles jsonb/hstore attributes (PostgreSQL)...'

  # jsonb is already JSON-compatible
  object = { 'metadata' => { 'views' => 100, 'likes' => 50 } }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal 100, parsed['metadata']['views']
  assert_equal 50, parsed['metadata']['likes']
  puts ' ✓ jsonb/hstore attributes handled correctly'

  puts 'Test 79: handles serialized attributes (ActiveRecord serialize)...'

  # Serialized attributes become JSON strings
  object = { 'preferences' => { 'theme' => 'dark', 'notifications' => true } }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal 'dark', parsed['preferences']['theme']
  puts ' ✓ serialized attributes handled correctly'

  puts 'Test 80: handles encrypted attributes (ActiveRecord encryption)...'

  # Encrypted attributes are strings (ciphertext)
  object = { 'encrypted_ssn' => 'encrypted_value_here' }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  assert_equal 'encrypted_value_here', parsed['encrypted_ssn']
  puts ' ✓ encrypted attributes stored as encrypted strings'

  puts 'Test 81: handles BigDecimal precision round-trip...'

  bd = BigDecimal('123.456789012345678901234567890')
  object = { 'amount' => bd.to_s }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  restored = BigDecimal(parsed['amount'])
  assert_equal bd, restored
  puts ' ✓ BigDecimal precision preserved in round-trip'

  puts 'Test 82: datetime/timezone normalization (UTC) is consistent...'

  # All datetimes should be normalized to UTC. Note: Time#utc mutates
  # its receiver in place, so use the non-destructive #getutc instead.
  utc_time = Time.now.getutc

  object = { 'timestamp' => utc_time.iso8601 }
  json = JSON.generate(object)
  parsed = JSON.parse(json)

  parsed_time = Time.iso8601(parsed['timestamp'])
  assert parsed_time.utc?
  puts ' ✓ datetime normalized to UTC consistently'
end
|