tracebook 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +7 -0
  2. data/.yardopts +10 -0
  3. data/CHANGELOG.md +43 -0
  4. data/MIT-LICENSE +20 -0
  5. data/README.md +881 -0
  6. data/Rakefile +21 -0
  7. data/app/assets/images/tracebook/.keep +0 -0
  8. data/app/assets/javascripts/tracebook/application.js +88 -0
  9. data/app/assets/stylesheets/tracebook/application.css +173 -0
  10. data/app/controllers/concerns/.keep +0 -0
  11. data/app/controllers/tracebook/application_controller.rb +4 -0
  12. data/app/controllers/tracebook/exports_controller.rb +25 -0
  13. data/app/controllers/tracebook/interactions_controller.rb +71 -0
  14. data/app/helpers/tracebook/application_helper.rb +4 -0
  15. data/app/helpers/tracebook/interactions_helper.rb +35 -0
  16. data/app/jobs/tracebook/application_job.rb +5 -0
  17. data/app/jobs/tracebook/daily_rollups_job.rb +100 -0
  18. data/app/jobs/tracebook/export_job.rb +162 -0
  19. data/app/jobs/tracebook/persist_interaction_job.rb +160 -0
  20. data/app/mailers/tracebook/application_mailer.rb +6 -0
  21. data/app/models/concerns/.keep +0 -0
  22. data/app/models/tracebook/application_record.rb +5 -0
  23. data/app/models/tracebook/interaction.rb +100 -0
  24. data/app/models/tracebook/pricing_rule.rb +84 -0
  25. data/app/models/tracebook/redaction_rule.rb +81 -0
  26. data/app/models/tracebook/rollup_daily.rb +73 -0
  27. data/app/views/layouts/tracebook/application.html.erb +18 -0
  28. data/app/views/tracebook/interactions/index.html.erb +105 -0
  29. data/app/views/tracebook/interactions/show.html.erb +44 -0
  30. data/config/routes.rb +8 -0
  31. data/db/migrate/20241112000100_create_tracebook_interactions.rb +55 -0
  32. data/db/migrate/20241112000200_create_tracebook_rollups_dailies.rb +24 -0
  33. data/db/migrate/20241112000300_create_tracebook_pricing_rules.rb +21 -0
  34. data/db/migrate/20241112000400_create_tracebook_redaction_rules.rb +19 -0
  35. data/lib/tasks/tracebook_tasks.rake +4 -0
  36. data/lib/tasks/yard.rake +29 -0
  37. data/lib/tracebook/adapters/active_agent.rb +82 -0
  38. data/lib/tracebook/adapters/ruby_llm.rb +97 -0
  39. data/lib/tracebook/adapters.rb +6 -0
  40. data/lib/tracebook/config.rb +130 -0
  41. data/lib/tracebook/engine.rb +5 -0
  42. data/lib/tracebook/errors.rb +9 -0
  43. data/lib/tracebook/mappers/anthropic.rb +59 -0
  44. data/lib/tracebook/mappers/base.rb +38 -0
  45. data/lib/tracebook/mappers/ollama.rb +49 -0
  46. data/lib/tracebook/mappers/openai.rb +75 -0
  47. data/lib/tracebook/mappers.rb +283 -0
  48. data/lib/tracebook/normalized_interaction.rb +86 -0
  49. data/lib/tracebook/pricing/calculator.rb +39 -0
  50. data/lib/tracebook/pricing.rb +5 -0
  51. data/lib/tracebook/redaction_pipeline.rb +88 -0
  52. data/lib/tracebook/redactors/base.rb +29 -0
  53. data/lib/tracebook/redactors/card_pan.rb +15 -0
  54. data/lib/tracebook/redactors/email.rb +15 -0
  55. data/lib/tracebook/redactors/phone.rb +15 -0
  56. data/lib/tracebook/redactors.rb +8 -0
  57. data/lib/tracebook/result.rb +53 -0
  58. data/lib/tracebook/version.rb +3 -0
  59. data/lib/tracebook.rb +201 -0
  60. metadata +164 -0
data/lib/tracebook/mappers/openai.rb
@@ -0,0 +1,75 @@
+ # frozen_string_literal: true
+
+ require_relative "base"
+
+ module Tracebook
+   module Mappers
+     class OpenAI < Base
+       def normalize(raw_request:, raw_response:, meta: {})
+         request = symbolize(raw_request || {})
+         response = symbolize(raw_response || {})
+         metadata = build_metadata(response)
+         meta_info = indifferent_meta(meta)
+
+         build_interaction(
+           provider: "openai",
+           model: request[:model] || response[:model],
+           project: meta_info[:project],
+           request_payload: raw_request,
+           response_payload: raw_response,
+           request_text: join_messages(request[:messages]),
+           response_text: first_choice_text(response),
+           input_tokens: usage_tokens(response, :prompt_tokens),
+           output_tokens: usage_tokens(response, :completion_tokens),
+           latency_ms: meta_info[:latency_ms],
+           status: meta_info[:status]&.to_sym || default_status(response),
+           error_class: nil,
+           error_message: nil,
+           tags: Array(meta_info[:tags]).compact,
+           metadata: metadata,
+           user: meta_info[:user],
+           parent_id: meta_info[:parent_id],
+           session_id: meta_info[:session_id]
+         )
+       end
+
+       private
+
+       def join_messages(messages)
+         Array(messages).map { |message| message.with_indifferent_access[:content].to_s }.reject(&:empty?).join("\n\n")
+       end
+
+       def first_choice(response)
+         Array(response[:choices]).first || {}
+       end
+
+       def first_choice_text(response)
+         choice = first_choice(response)
+         message = choice[:message] || {}
+         message.with_indifferent_access[:content].to_s
+       end
+
+       def usage_tokens(response, key)
+         usage = response[:usage] || {}
+         usage.with_indifferent_access[key]&.to_i
+       end
+
+       def build_metadata(response)
+         choice = first_choice(response)
+         metadata = {}
+         metadata["finish_reason"] = choice[:finish_reason] if choice[:finish_reason]
+         metadata
+       end
+
+       def default_status(response)
+         finish_reason = first_choice(response)[:finish_reason]
+         return :canceled if finish_reason == "length"
+         return :error if finish_reason == "error"
+
+         :success
+       end
+     end
+   end
+ end
+
+ TraceBook = Tracebook unless defined?(TraceBook)
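
For orientation, a minimal sketch of driving this mapper directly inside a Rails app that has the engine installed. The payload shapes are illustrative, and Base#build_interaction (defined in mappers/base.rb, not part of this hunk) is assumed to return a NormalizedInteraction.

    mapper = Tracebook::Mappers::OpenAI.new
    normalized = mapper.normalize(
      raw_request: { model: "gpt-4o", messages: [{ role: "user", content: "Hello" }] },
      raw_response: {
        choices: [{ message: { content: "Hi there!" }, finish_reason: "stop" }],
        usage: { prompt_tokens: 9, completion_tokens: 4 }
      },
      meta: { project: "docs-bot", latency_ms: 180 }
    )
    normalized.input_tokens # => 9
    normalized.status       # => :success ("stop" falls through to the default)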
data/lib/tracebook/mappers.rb
@@ -0,0 +1,283 @@
+ # frozen_string_literal: true
+
+ require "active_support/core_ext/hash/indifferent_access"
+ require "active_support/core_ext/object/deep_dup"
+ require_relative "mappers/base"
+ require_relative "mappers/openai"
+ require_relative "mappers/anthropic"
+ require_relative "mappers/ollama"
+
+ module Tracebook
+   # Mappers normalize provider-specific request/response formats into TraceBook's
+   # standard {NormalizedInteraction} structure.
+   #
+   # Built-in mappers exist for OpenAI, Anthropic, and Ollama. For other providers,
+   # a fallback mapper is used which preserves the raw payloads.
+   #
+   # @example Using the mapper in an adapter
+   #   normalized = Tracebook::Mappers.normalize(
+   #     "openai",
+   #     raw_request: { model: "gpt-4o", messages: messages },
+   #     raw_response: openai_response,
+   #     meta: { project: "chatbot", user: current_user, latency_ms: 200 }
+   #   )
+   #   TraceBook.record!(**normalized.to_h)
+   #
+   # @example Creating a custom mapper
+   #   # See {Mappers::Base} for the base class
+   #   class Mappers::Cohere < Mappers::Base
+   #     def self.normalize(raw_request:, raw_response:, meta: {})
+   #       # Your normalization logic
+   #       build_interaction(provider: "cohere", model: ..., ...)
+   #     end
+   #   end
+   #
+   # @see Mappers::Base
+   # @see NormalizedInteraction
+   module Mappers
+     extend self
+
+     # Normalizes a provider's request/response into standard format.
+     #
+     # Routes to provider-specific mappers for OpenAI, Anthropic, and Ollama.
+     # Falls back to a generic mapper for unknown providers.
+     #
+     # @param provider [String] Provider name ("openai", "anthropic", "ollama", etc.)
+     # @param raw_request [Hash] The original request sent to the provider
+     # @param raw_response [Hash] The original response from the provider
+     # @param meta [Hash] Additional metadata (project, user, session_id, tags, etc.)
+     #
+     # @option meta [String] :project Project name for filtering
+     # @option meta [ActiveRecord::Base] :user Associated user
+     # @option meta [String] :session_id Session identifier
+     # @option meta [Integer] :parent_id Parent interaction ID
+     # @option meta [Array<String>] :tags Labels for filtering
+     # @option meta [Integer] :latency_ms Request duration in milliseconds
+     # @option meta [Symbol] :status :success, :error, or :canceled
+     # @option meta [String] :error_class Exception class name (for errors)
+     # @option meta [String] :error_message Exception message (for errors)
+     #
+     # @return [NormalizedInteraction] Normalized interaction ready for {TraceBook.record!}
+     #
+     # @example Normalizing an OpenAI response
+     #   normalized = Tracebook::Mappers.normalize(
+     #     "openai",
+     #     raw_request: {
+     #       model: "gpt-4o",
+     #       messages: [{ role: "user", content: "Hello" }]
+     #     },
+     #     raw_response: {
+     #       choices: [{ message: { content: "Hi!" } }],
+     #       usage: { prompt_tokens: 10, completion_tokens: 5 }
+     #     },
+     #     meta: { latency_ms: 150, user: current_user }
+     #   )
+     def normalize(provider, raw_request:, raw_response:, meta: {})
+       case provider.to_s
+       when "openai"
+         normalize_openai(raw_request, raw_response, meta)
+       when "anthropic"
+         normalize_anthropic(raw_request, raw_response, meta)
+       when "ollama"
+         normalize_ollama(raw_request, raw_response, meta)
+       else
+         fallback_normalized(provider, raw_request, raw_response, meta)
+       end
+     end
+
+     private
+
+     def normalize_openai(raw_request, raw_response, meta)
+       request = symbolize(raw_request || {})
+       response = symbolize(raw_response || {})
+       metadata = openai_metadata(response)
+       meta_info = indifferent_meta(meta)
+
+       Tracebook::NormalizedInteraction.new(
+         provider: "openai",
+         model: request[:model] || response[:model],
+         project: meta_info[:project],
+         request_payload: raw_request,
+         response_payload: raw_response,
+         request_text: join_messages(request[:messages]),
+         response_text: openai_response_text(response),
+         input_tokens: openai_usage_tokens(response, :prompt_tokens),
+         output_tokens: openai_usage_tokens(response, :completion_tokens),
+         latency_ms: meta_info[:latency_ms],
+         status: meta_info[:status]&.to_sym || openai_status(response),
+         error_class: nil,
+         error_message: nil,
+         tags: Array(meta_info[:tags]).compact,
+         metadata: metadata,
+         user: meta_info[:user],
+         parent_id: meta_info[:parent_id],
+         session_id: meta_info[:session_id]
+       )
+     end
+
+     def normalize_anthropic(raw_request, raw_response, meta)
+       request = symbolize(raw_request || {})
+       response = symbolize(raw_response || {})
+       meta_info = indifferent_meta(meta)
+
+       Tracebook::NormalizedInteraction.new(
+         provider: "anthropic",
+         model: request[:model] || response[:model],
+         project: meta_info[:project],
+         request_payload: raw_request,
+         response_payload: raw_response,
+         request_text: extract_anthropic_messages(request[:messages]),
+         response_text: extract_blocks(response[:content]).join("\n\n"),
+         input_tokens: anthropic_usage(response, :input_tokens),
+         output_tokens: anthropic_usage(response, :output_tokens),
+         latency_ms: meta_info[:latency_ms],
+         status: meta_info[:status]&.to_sym || :success,
+         error_class: nil,
+         error_message: nil,
+         tags: Array(meta_info[:tags]).compact,
+         metadata: {},
+         user: meta_info[:user],
+         parent_id: meta_info[:parent_id],
+         session_id: meta_info[:session_id]
+       )
+     end
+
+     def normalize_ollama(raw_request, raw_response, meta)
+       request = symbolize(raw_request || {})
+       response = symbolize(raw_response || {})
+       meta_info = indifferent_meta(meta)
+
+       metadata = {}
+       metadata["eval_count"] = response[:eval_count] if response.key?(:eval_count)
+
+       Tracebook::NormalizedInteraction.new(
+         provider: "ollama",
+         model: request[:model] || response[:model],
+         project: meta_info[:project],
+         request_payload: raw_request,
+         response_payload: raw_response,
+         request_text: request[:prompt] || request[:input],
+         response_text: response[:response],
+         input_tokens: response[:prompt_eval_count],
+         output_tokens: response[:eval_count],
+         latency_ms: meta_info[:latency_ms] || to_milliseconds(response[:total_duration]),
+         status: meta_info[:status]&.to_sym || :success,
+         error_class: nil,
+         error_message: nil,
+         tags: Array(meta_info[:tags]).compact,
+         metadata: metadata,
+         user: meta_info[:user],
+         parent_id: meta_info[:parent_id],
+         session_id: meta_info[:session_id]
+       )
+     end
+
+     def fallback_normalized(provider, raw_request, raw_response, meta)
+       request = symbolize(raw_request || {})
+       response = symbolize(raw_response || {})
+       meta_info = indifferent_meta(meta)
+
+       Tracebook::NormalizedInteraction.new(
+         provider: provider.to_s,
+         model: request[:model] || response[:model],
+         project: meta_info[:project],
+         request_payload: raw_request,
+         response_payload: raw_response,
+         request_text: meta_info[:request_text],
+         response_text: meta_info[:response_text],
+         input_tokens: meta_info[:input_tokens],
+         output_tokens: meta_info[:output_tokens],
+         latency_ms: meta_info[:latency_ms],
+         status: meta_info[:status]&.to_sym || :success,
+         error_class: meta_info[:error_class],
+         error_message: meta_info[:error_message],
+         tags: Array(meta_info[:tags]).compact,
+         metadata: meta_info[:metadata] || {},
+         user: meta_info[:user],
+         parent_id: meta_info[:parent_id],
+         session_id: meta_info[:session_id]
+       )
+     end
+
+     # OpenAI helpers
+     def join_messages(messages)
+       Array(messages).map { |message| message.with_indifferent_access[:content].to_s }.reject(&:empty?).join("\n\n")
+     end
+
+     def openai_first_choice(response)
+       choices = Array(response[:choices])
+       choices.first&.with_indifferent_access || {}
+     end
+
+     def openai_response_text(response)
+       choice = openai_first_choice(response)
+       message = choice[:message] || {}
+       message.with_indifferent_access[:content].to_s
+     end
+
+     def openai_usage_tokens(response, key)
+       usage = response[:usage] || {}
+       usage.with_indifferent_access[key]&.to_i
+     end
+
+     def openai_metadata(response)
+       choice = openai_first_choice(response)
+       metadata = {}
+       metadata["finish_reason"] = choice[:finish_reason] if choice[:finish_reason]
+       metadata
+     end
+
+     def openai_status(response)
+       finish_reason = openai_first_choice(response)[:finish_reason]
+       return :canceled if finish_reason == "length"
+       return :error if finish_reason == "error"
+
+       :success
+     end
+
+     # Anthropic helpers
+     def extract_anthropic_messages(messages)
+       Array(messages).flat_map do |message|
+         message = message.respond_to?(:with_indifferent_access) ? message.with_indifferent_access : message
+         extract_blocks(message[:content])
+       end.join("\n\n")
+     end
+
+     def extract_blocks(blocks)
+       Array(blocks).flat_map do |block|
+         block = block.respond_to?(:with_indifferent_access) ? block.with_indifferent_access : block
+         case block[:type]
+         when "text"
+           block[:text]
+         when "input_text"
+           block[:text]
+         else
+           nil
+         end
+       end.compact
+     end
+
+     def anthropic_usage(response, key)
+       usage = response[:usage] || {}
+       usage.with_indifferent_access[key]&.to_i
+     end
+
+     # Ollama helpers
+     def to_milliseconds(value)
+       return unless value
+
+       (value.to_f * 1000).to_i
+     end
+
+     # Common helpers
+     def indifferent_meta(meta)
+       (meta || {}).with_indifferent_access
+     end
+
+     def symbolize(hash)
+       hash.deep_dup.transform_keys { |key| key.respond_to?(:to_sym) ? key.to_sym : key }
+     end
+   end
+ end
+
+ TraceBook = Tracebook unless defined?(TraceBook)
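
The YARD examples above cover the OpenAI path; the fallback branch is easiest to see with a provider that has no dedicated mapper. A minimal sketch with an invented "mistral" payload, where the summary text and token counts travel in meta because the fallback does not parse the payloads:

    normalized = Tracebook::Mappers.normalize(
      "mistral",
      raw_request: { model: "mistral-small", prompt: "Summarize this ticket" },
      raw_response: { output: "Customer reports a billing issue." },
      meta: {
        request_text: "Summarize this ticket",
        response_text: "Customer reports a billing issue.",
        input_tokens: 12,
        output_tokens: 8,
        tags: ["support"]
      }
    )
    normalized.provider # => "mistral"
    normalized.status   # => :success (the default when meta[:status] is absent)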
data/lib/tracebook/normalized_interaction.rb
@@ -0,0 +1,86 @@
+ # frozen_string_literal: true
+
+ module Tracebook
+   # Normalized representation of an LLM interaction.
+   #
+   # This immutable data structure provides a standard format for LLM interactions
+   # across different providers. Mappers convert provider-specific formats into this
+   # structure before persistence.
+   #
+   # @attr provider [String] Provider name (e.g., "openai", "anthropic")
+   # @attr model [String] Model identifier (e.g., "gpt-4o", "claude-3-5-sonnet")
+   # @attr project [String, nil] Project name for filtering
+   # @attr request_payload [Hash] Full request sent to provider (will be encrypted)
+   # @attr response_payload [Hash] Full response from provider (will be encrypted)
+   # @attr request_text [String, nil] Human-readable request summary
+   # @attr response_text [String, nil] Human-readable response summary
+   # @attr input_tokens [Integer, nil] Prompt token count
+   # @attr output_tokens [Integer, nil] Completion token count
+   # @attr latency_ms [Integer, nil] Request duration in milliseconds
+   # @attr status [Symbol, String] :success, :error, or :canceled
+   # @attr error_class [String, nil] Exception class name on failure
+   # @attr error_message [String, nil] Exception message on failure
+   # @attr tags [Array<String>] Labels for filtering
+   # @attr metadata [Hash] Custom metadata
+   # @attr user [ActiveRecord::Base, nil] Associated user (polymorphic)
+   # @attr parent_id [Integer, nil] Parent interaction ID for hierarchical chains
+   # @attr session_id [String, nil] Session identifier for grouping related calls
+   #
+   # @example Creating a normalized interaction
+   #   interaction = NormalizedInteraction.new(
+   #     provider: "openai",
+   #     model: "gpt-4o",
+   #     request_payload: { messages: messages },
+   #     response_payload: response,
+   #     input_tokens: 100,
+   #     output_tokens: 50,
+   #     status: :success
+   #   )
+   #
+   # @see Mappers
+   NormalizedInteraction = Data.define(
+     :provider,
+     :model,
+     :project,
+     :request_payload,
+     :response_payload,
+     :request_text,
+     :response_text,
+     :input_tokens,
+     :output_tokens,
+     :latency_ms,
+     :status,
+     :error_class,
+     :error_message,
+     :tags,
+     :metadata,
+     :user,
+     :parent_id,
+     :session_id
+   ) do
+     def initialize(
+       provider:,
+       model:,
+       project: nil,
+       request_payload: {},
+       response_payload: {},
+       request_text: nil,
+       response_text: nil,
+       input_tokens: nil,
+       output_tokens: nil,
+       latency_ms: nil,
+       status: "success",
+       error_class: nil,
+       error_message: nil,
+       tags: [],
+       metadata: {},
+       user: nil,
+       parent_id: nil,
+       session_id: nil
+     )
+       super
+     end
+   end
+ end
+
+ TraceBook = Tracebook unless defined?(TraceBook)
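
A quick sketch of how the defaults behave on this Data-based value object (Ruby 3.2+): only provider and model are required, everything else falls back to the values in the custom initializer, and #to_h is what the mappers hand on to the recording call.

    interaction = Tracebook::NormalizedInteraction.new(provider: "openai", model: "gpt-4o")
    interaction.status             # => "success"
    interaction.tags               # => []
    interaction.to_h.keys.first(3) # => [:provider, :model, :project]

    Tracebook::NormalizedInteraction.new(model: "gpt-4o")
    # => ArgumentError (missing keyword: :provider)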
data/lib/tracebook/pricing/calculator.rb
@@ -0,0 +1,39 @@
+ # frozen_string_literal: true
+
+ module Tracebook
+   module Pricing
+     CostBreakdown = Data.define(:input_cents, :output_cents, :total_cents, :currency)
+
+     module Calculator
+       extend self
+
+       def call(provider:, model:, input_tokens:, output_tokens:, occurred_at: Time.current)
+         rule = matching_rule(provider, model, occurred_at)
+         return CostBreakdown.new(input_cents: 0, output_cents: 0, total_cents: 0, currency: Tracebook.config.default_currency) unless rule
+
+         input_cents = cost_for(rule.input_cents_per_unit, input_tokens)
+         output_cents = cost_for(rule.output_cents_per_unit, output_tokens)
+         CostBreakdown.new(
+           input_cents: input_cents,
+           output_cents: output_cents,
+           total_cents: input_cents + output_cents,
+           currency: rule.currency
+         )
+       end
+
+       def matching_rule(provider, model, occurred_at)
+         Tracebook::PricingRule.where(provider: provider).select do |rule|
+           rule.matches_model?(model) && rule.active_on?(occurred_at.to_date)
+         end.min_by(&:effective_from)
+       end
+
+       def cost_for(cents_per_unit, tokens)
+         return 0 if cents_per_unit.to_i <= 0 || tokens.to_i <= 0
+
+         (tokens.to_i / 1000.0 * cents_per_unit.to_i).round
+       end
+     end
+   end
+ end
+
+ TraceBook = Tracebook unless defined?(TraceBook)
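
A sketch of the arithmetic, assuming a Rails app with a matching Tracebook::PricingRule row whose *_cents_per_unit columns are prices in cents per 1,000 tokens (which is what the tokens / 1000.0 in cost_for implies); the rule values below are invented for illustration.

    breakdown = Tracebook::Pricing::Calculator.call(
      provider: "openai",
      model: "gpt-4o",
      input_tokens: 12_345,
      output_tokens: 678
    )
    # With input_cents_per_unit: 30 and output_cents_per_unit: 60 on the rule:
    #   input cents:  (12_345 / 1000.0 * 30).round => 370
    #   output cents: (678 / 1000.0 * 60).round    => 41
    breakdown.total_cents # => 411
    # With no matching rule, every amount is 0 and the currency falls back to
    # Tracebook.config.default_currency.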
data/lib/tracebook/pricing.rb
@@ -0,0 +1,5 @@
+ # frozen_string_literal: true
+
+ require_relative "pricing/calculator"
+
+ TraceBook = Tracebook unless defined?(TraceBook)
data/lib/tracebook/redaction_pipeline.rb
@@ -0,0 +1,88 @@
+ # frozen_string_literal: true
+
+ require "active_support/core_ext/object/deep_dup"
+
+ module Tracebook
+   class RedactionPipeline
+     attr_reader :config
+
+     def initialize(config: Tracebook.config)
+       @config = config
+     end
+
+     def call(normalized)
+       data = normalized.to_h.deep_dup
+
+       apply_callable_redactors!(data)
+       apply_database_rules!(data)
+
+       NormalizedInteraction.new(**data)
+     end
+
+     private
+
+     def apply_callable_redactors!(data)
+       redactors = Array(config.redactors) + Array(config.custom_redactors)
+       redactors.each do |redactor|
+         apply_to_request!(data, redactor)
+         apply_to_response!(data, redactor)
+         apply_to_metadata!(data, redactor)
+       end
+     end
+
+     def apply_database_rules!(data)
+       Tracebook::RedactionRule.where(enabled: true).order(:priority).find_each do |rule|
+         callable = ->(value) { redact_string(value, rule.compiled_pattern, rule.replacement) }
+
+         case rule.applies_to.to_sym
+         when :request
+           apply_to_request!(data, callable)
+         when :response
+           apply_to_response!(data, callable)
+         when :both
+           apply_to_request!(data, callable)
+           apply_to_response!(data, callable)
+         when :metadata
+           apply_to_metadata!(data, callable)
+         end
+       end
+     end
+
+     def apply_to_request!(data, redactor)
+       data[:request_payload] = deep_transform(data[:request_payload], redactor)
+       data[:request_text] = redactor.call(data[:request_text]) if data[:request_text].is_a?(String)
+     end
+
+     def apply_to_response!(data, redactor)
+       data[:response_payload] = deep_transform(data[:response_payload], redactor)
+       data[:response_text] = redactor.call(data[:response_text]) if data[:response_text].is_a?(String)
+     end
+
+     def apply_to_metadata!(data, redactor)
+       data[:metadata] = deep_transform(data[:metadata], redactor)
+     end
+
+     def deep_transform(value, redactor)
+       case value
+       when String
+         redactor.call(value)
+       when Hash
+         value.each_with_object({}) do |(key, nested), memo|
+           memo[key] = deep_transform(nested, redactor)
+         end
+       when Array
+         value.map { |nested| deep_transform(nested, redactor) }
+       else
+         value
+       end
+     end
+
+     def redact_string(value, pattern, replacement)
+       return value unless value.is_a?(String)
+
+       value.gsub(pattern, replacement)
+     end
+   end
+ end
+
+ TraceBook = Tracebook unless defined?(TraceBook)
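
A sketch of running the pipeline with an ad-hoc OpenStruct standing in for the real configuration object (the actual API lives in lib/tracebook/config.rb, not shown in this hunk); the ticket-number lambda and the sample text are invented, and the call assumes the tracebook_redaction_rules table exists because apply_database_rules! queries Tracebook::RedactionRule.

    require "ostruct"

    config = OpenStruct.new(
      redactors: [Tracebook::Redactors::Email.new, Tracebook::Redactors::CardPAN.new],
      custom_redactors: [->(value) { value.gsub(/\bticket-\d+\b/, "[TICKET]") }]
    )

    normalized = Tracebook::NormalizedInteraction.new(
      provider: "openai",
      model: "gpt-4o",
      request_text: "jane.doe@example.com paid with 4242 4242 4242 4242 on ticket-991"
    )

    redacted = Tracebook::RedactionPipeline.new(config: config).call(normalized)
    redacted.request_text
    # => "[REDACTED] paid with [REDACTED] on [TICKET]"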
data/lib/tracebook/redactors/base.rb
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ module Tracebook
+   module Redactors
+     class Base
+       def call(value)
+         return value unless value.is_a?(String)
+
+         value.gsub(pattern, replacement)
+       end
+
+       def applies_to
+         :both
+       end
+
+       private
+
+       def pattern
+         raise NotImplementedError, "implement in subclasses"
+       end
+
+       def replacement
+         "[REDACTED]"
+       end
+     end
+   end
+ end
+
+ TraceBook = Tracebook unless defined?(TraceBook)
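
A sketch of a custom redactor built on this base class; the Ssn class name, its pattern, and its replacement are illustrative, not part of the gem.

    module Tracebook
      module Redactors
        class Ssn < Base
          private

          def pattern
            /\b\d{3}-\d{2}-\d{4}\b/
          end

          def replacement
            "[SSN]"
          end
        end
      end
    end

    Tracebook::Redactors::Ssn.new.call("SSN 123-45-6789 on file")
    # => "SSN [SSN] on file"
    Tracebook::Redactors::Ssn.new.call(nil)
    # => nil (non-strings pass through untouched)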
data/lib/tracebook/redactors/card_pan.rb
@@ -0,0 +1,15 @@
+ # frozen_string_literal: true
+
+ module Tracebook
+   module Redactors
+     class CardPAN < Base
+       private
+
+       def pattern
+         /\b(?:\d[ -]*?){13,16}\b/
+       end
+     end
+   end
+ end
+
+ TraceBook = Tracebook unless defined?(TraceBook)
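
A quick check of the pattern's behaviour: the lazy [ -]*? group absorbs spaces and dashes between digit groups, so a spaced card number collapses into a single replacement, while shorter digit runs are left alone.

    redactor = Tracebook::Redactors::CardPAN.new
    redactor.call("Card 4242 4242 4242 4242, exp 12/30")
    # => "Card [REDACTED], exp 12/30"
    redactor.call("Ref 12345")
    # => "Ref 12345" (fewer than 13 digits)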
data/lib/tracebook/redactors/email.rb
@@ -0,0 +1,15 @@
+ # frozen_string_literal: true
+
+ module Tracebook
+   module Redactors
+     class Email < Base
+       private
+
+       def pattern
+         /\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/i
+       end
+     end
+   end
+ end
+
+ TraceBook = Tracebook unless defined?(TraceBook)
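
The /i flag makes the pattern case-insensitive, so mixed-case addresses and plus-tagged local parts are caught as well:

    Tracebook::Redactors::Email.new.call("Escalate to Jane.Doe+ops@Example.COM and ping the on-call")
    # => "Escalate to [REDACTED] and ping the on-call"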
data/lib/tracebook/redactors/phone.rb
@@ -0,0 +1,15 @@
+ # frozen_string_literal: true
+
+ module Tracebook
+   module Redactors
+     class Phone < Base
+       private
+
+       def pattern
+         /(?:\+?\d{1,3}[\s.-]?)?(?:\(\d{3}\)|\d{3})[\s.-]?\d{3}[\s.-]?\d{4}/
+       end
+     end
+   end
+ end
+
+ TraceBook = Tracebook unless defined?(TraceBook)
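
The pattern reads as an optional country code, a three-digit area code (parenthesised or bare), then 3 + 4 digits with optional space, dot, or dash separators:

    Tracebook::Redactors::Phone.new.call("Call +1 (555) 123-4567 or 555.987.6543")
    # => "Call [REDACTED] or [REDACTED]"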
data/lib/tracebook/redactors.rb
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ require_relative "redactors/base"
+ require_relative "redactors/email"
+ require_relative "redactors/phone"
+ require_relative "redactors/card_pan"
+
+ TraceBook = Tracebook unless defined?(TraceBook)