uncaught 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/lib/uncaught/breadcrumbs.rb +94 -0
- data/lib/uncaught/client.rb +248 -0
- data/lib/uncaught/env_detector.rb +102 -0
- data/lib/uncaught/fingerprint.rb +144 -0
- data/lib/uncaught/integrations/rails.rb +54 -0
- data/lib/uncaught/integrations/sinatra.rb +57 -0
- data/lib/uncaught/prompt_builder.rb +174 -0
- data/lib/uncaught/rate_limiter.rb +65 -0
- data/lib/uncaught/sanitizer.rb +111 -0
- data/lib/uncaught/transport.rb +340 -0
- data/lib/uncaught/types.rb +84 -0
- data/lib/uncaught/version.rb +5 -0
- data/lib/uncaught.rb +122 -0
- metadata +55 -0
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "json"
require "time"

module Uncaught
  # Builds a structured Markdown "fix prompt" from a captured error event.
  #
  # The prompt is designed to be pasted into an AI assistant to diagnose and
  # fix the production error described by the event. Empty sections are
  # omitted to keep the prompt concise.
  module PromptBuilder
    module_function

    # Build a structured Markdown prompt that can be pasted into an AI assistant
    # to diagnose and fix the production error described by the event.
    #
    # Empty sections are omitted to keep the prompt concise.
    #
    # @param event [UncaughtEvent]
    # @return [String]
    def build(event)
      sections = []

      # ----- Intro -----------------------------------------------------------
      sections << "I have a production bug in my application that I need help diagnosing and fixing.\n"

      # ----- Error -----------------------------------------------------------
      if event.error
        location = extract_location(event.error.stack)
        lines = ["## Error", ""]
        lines << "- **Type:** #{event.error.type || 'Error'}"
        lines << "- **Message:** #{event.error.message || '(no message)'}"
        lines << "- **Location:** #{location}" if location
        sections << lines.join("\n")
      end

      # ----- Stack Trace -----------------------------------------------------
      # Prefer the source-mapped stack when available; cap at 15 frames.
      stack_source = event.error&.resolved_stack || event.error&.stack
      if stack_source && !stack_source.empty?
        frames = stack_source.split("\n").first(15).map(&:rstrip).join("\n")
        label = event.error&.resolved_stack ? "Stack Trace (source-mapped)" : "Stack Trace"
        sections << "## #{label}\n\n```\n#{frames}\n```"
      end

      # ----- Failed Operation ------------------------------------------------
      sections << format_operation(event.operation) if event.operation

      # ----- HTTP Request Context --------------------------------------------
      sections << format_request(event.request) if event.request

      # ----- User Session (last 5 breadcrumbs) -------------------------------
      if event.breadcrumbs && !event.breadcrumbs.empty?
        sections << format_breadcrumbs(event.breadcrumbs)
      end

      # ----- Environment -----------------------------------------------------
      sections << format_environment(event.environment) if event.environment

      # ----- Component Stack -------------------------------------------------
      if event.error&.component_stack && !event.error.component_stack.empty?
        sections << "## React Component Stack\n\n```\n#{event.error.component_stack.strip}\n```"
      end

      # ----- What I need -----------------------------------------------------
      sections << [
        "## What I need",
        "",
        "1. **Root cause analysis** -- explain why this error is occurring.",
        "2. **A fix** -- provide the corrected code with an explanation of the changes.",
        "3. **Prevention** -- suggest any guards or tests to prevent this from happening again."
      ].join("\n")

      sections.join("\n\n") + "\n"
    end

    # -------------------------------------------------------------------------
    # Internal helpers
    # -------------------------------------------------------------------------

    # Extract the top-most location (file:line:col) from a stack trace string.
    # Recognises V8, SpiderMonkey/JSC and Ruby frame formats; returns nil when
    # no frame matches.
    #
    # @param stack [String, nil]
    # @return [String, nil]
    def extract_location(stack)
      return nil unless stack

      stack.split("\n").each do |line|
        trimmed = line.strip

        # V8: "  at fn (file:line:col)" -- anchored so "at file:1:2" embedded
        # in an arbitrary message line is not mistaken for a frame.
        v8 = trimmed.match(/\Aat\s+(?:.+?\s+\()?(.+?:\d+:\d+)\)?/)
        return v8[1] if v8

        # SpiderMonkey / JSC: "fn@file:line:col"
        sm = trimmed.match(/@(.+?:\d+:\d+)/)
        return sm[1] if sm

        # Ruby: "/path/to/file.rb:42:in `method'"
        rb = trimmed.match(%r{(.+?:\d+):in\s+})
        return rb[1] if rb
      end

      nil
    end

    # Render the failed-operation section (provider/type/method, optional
    # pretty-printed params and error details).
    def format_operation(op)
      lines = ["## Failed Operation", ""]
      lines << "- **Provider:** #{op.provider}"
      lines << "- **Type:** #{op.type}"
      lines << "- **Method:** #{op.method}"
      if op.params
        lines << "- **Params:**"
        lines << "```json"
        lines << JSON.pretty_generate(op.params)
        lines << "```"
      end
      lines << "- **Error Code:** #{op.error_code}" if op.error_code
      lines << "- **Error Details:** #{op.error_details}" if op.error_details
      lines.join("\n")
    end

    # Render the HTTP request section. Non-string bodies are pretty-printed
    # as JSON.
    def format_request(req)
      lines = ["## HTTP Request Context", ""]
      lines << "- **Method:** #{req.method}" if req.method
      lines << "- **URL:** #{req.url}" if req.url
      if req.body
        lines << "- **Body:**"
        lines << "```json"
        lines << (req.body.is_a?(String) ? req.body : JSON.pretty_generate(req.body))
        lines << "```"
      end
      lines.join("\n")
    end

    # Render the last five breadcrumbs as a timestamped bullet list.
    def format_breadcrumbs(crumbs)
      recent = crumbs.last(5)
      lines = ["## User Session", ""]
      recent.each do |crumb|
        time = format_time(crumb.timestamp)
        lines << "- `#{time}` **[#{crumb.type}]** #{crumb.message}"
      end
      lines.join("\n")
    end

    # Extract HH:MM:SS from an ISO timestamp; falls back to the raw value
    # when parsing fails.
    def format_time(iso)
      t = Time.parse(iso)
      t.strftime("%H:%M:%S")
    rescue
      iso.to_s
    end

    # Render the environment section, skipping any entry whose value is
    # nil or blank.
    def format_environment(env)
      lines = ["## Environment", ""]
      entries = [
        ["Deploy Environment", env.deploy],
        ["Framework", env.framework],
        ["Framework Version", env.framework_version],
        ["Runtime", env.runtime],
        ["Runtime Version", env.runtime_version],
        ["Platform", env.platform],
        ["Browser", env.browser ? "#{env.browser} #{env.browser_version}".strip : nil],
        ["OS", env.os],
        ["Device", env.device_type],
        ["Locale", env.locale],
        ["Timezone", env.timezone],
        ["URL", env.url]
      ]
      entries.each do |label, value|
        lines << "- **#{label}:** #{value}" if value && !value.to_s.empty?
      end
      lines.join("\n")
    end

    private_class_method :extract_location, :format_operation, :format_request,
                         :format_breadcrumbs, :format_time, :format_environment
  end
end
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Uncaught
  # Sliding-window rate limiter.
  #
  # Enforces:
  # - A global maximum of events per 60-second window.
  # - A per-fingerprint maximum of events per 60-second window.
  #
  # Thread-safe via Mutex.
  class RateLimiter
    # Window duration in seconds.
    WINDOW_SECONDS = 60

    # @param global_max [Integer] Max events across all fingerprints. Defaults to 30.
    # @param per_fingerprint_max [Integer] Max events for a single fingerprint. Defaults to 5.
    def initialize(global_max: 30, per_fingerprint_max: 5)
      @global_max = global_max
      @per_fingerprint_max = per_fingerprint_max
      @global_timestamps = []
      @buckets = {}
      @mutex = Mutex.new
    end

    # Returns true if the event identified by fingerprint is allowed through,
    # false if it should be dropped.
    #
    # @param fingerprint [String]
    # @return [Boolean]
    def should_allow?(fingerprint)
      @mutex.synchronize do
        now = Time.now.to_f

        # --- Global limit ---
        prune!(@global_timestamps, now)
        return false if @global_timestamps.size >= @global_max

        # --- Per-fingerprint limit ---
        @buckets[fingerprint] ||= []
        fp_timestamps = @buckets[fingerprint]
        prune!(fp_timestamps, now)
        return false if fp_timestamps.size >= @per_fingerprint_max

        # Record this event
        @global_timestamps << now
        fp_timestamps << now

        # Periodic cleanup: buckets are only pruned when their fingerprint is
        # seen again, so stale buckets would otherwise accumulate forever.
        # Prune every bucket first, then drop the empty ones, so memory stays
        # bounded even with many distinct one-off fingerprints.
        if @buckets.size > 1000
          @buckets.each_value { |ts| prune!(ts, now) }
          @buckets.delete_if { |_k, v| v.empty? }
        end

        true
      end
    end

    private

    # Remove timestamps older than the sliding window from an array (in-place).
    # Relies on timestamps being appended in monotonically increasing order.
    def prune!(timestamps, now)
      cutoff = now - WINDOW_SECONDS
      timestamps.shift while timestamps.any? && timestamps.first <= cutoff
    end
  end
end
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "set"
require "date"
require "time"

module Uncaught
  # Deep sanitiser for event payloads: redacts secret-looking keys, strips
  # sensitive headers, truncates long strings, and survives circular
  # references -- all without mutating the input.
  module Sanitizer
    # Default key patterns that are always redacted.
    DEFAULT_SENSITIVE_KEYS = %w[
      password passwd secret token apikey api_key authorization
      credit_card creditcard card_number cvv ssn social_security
      private_key access_token refresh_token session_id cookie
    ].freeze

    # Headers that are always stripped regardless of key matching.
    SENSITIVE_HEADERS = Set.new(%w[authorization cookie set-cookie]).freeze

    REDACTED = "[REDACTED]"
    MAX_STRING_LENGTH = 2048

    module_function

    # Deep-clone and sanitise obj, redacting values whose keys match
    # sensitive patterns.
    #
    # - Handles circular references (returns "[Circular]").
    # - Truncates strings longer than 2048 characters.
    # - Never mutates the original object.
    #
    # @param obj [Object] The value to sanitise.
    # @param additional_keys [Array<String>] Extra key patterns to redact.
    # @return [Object]
    def sanitize(obj, additional_keys = [])
      pattern = build_key_pattern(additional_keys)
      seen = Set.new
      walk(obj, pattern, seen, nil)
    end

    # Build a single Regexp that matches any of the sensitive key patterns
    # (case-insensitive).
    def build_key_pattern(additional_keys = [])
      all_keys = DEFAULT_SENSITIVE_KEYS + (additional_keys || [])
      escaped = all_keys.map { |k| Regexp.escape(k) }
      Regexp.new(escaped.join("|"), Regexp::IGNORECASE)
    end

    # Recursively sanitise value. `key` is the hash key (or struct member)
    # under which value was found, used for redaction matching; `seen`
    # tracks object_ids of containers on the current descent path to detect
    # cycles.
    def walk(value, pattern, seen, key)
      # Redact if the current key is sensitive
      if key && pattern.match?(key.to_s)
        return REDACTED
      end

      case value
      when nil
        nil
      when String
        if value.length > MAX_STRING_LENGTH
          value[0, MAX_STRING_LENGTH] + "...[truncated]"
        else
          value.dup
        end
      when Integer, Float, TrueClass, FalseClass
        value
      when Symbol
        value.to_s
      when Time, DateTime
        value.iso8601(3)
      when Array
        obj_id = value.object_id
        return "[Circular]" if seen.include?(obj_id)

        seen.add(obj_id)
        result = value.map { |item| walk(item, pattern, seen, nil) }
        seen.delete(obj_id)
        result
      when Hash
        obj_id = value.object_id
        return "[Circular]" if seen.include?(obj_id)

        seen.add(obj_id)
        result = {}
        value.each do |k, v|
          str_key = k.to_s
          # Always strip sensitive headers
          if SENSITIVE_HEADERS.include?(str_key.downcase)
            result[k] = REDACTED
          else
            result[k] = walk(v, pattern, seen, str_key)
          end
        end
        seen.delete(obj_id)
        result
      when Struct
        obj_id = value.object_id
        return "[Circular]" if seen.include?(obj_id)

        seen.add(obj_id)
        begin
          result = value.class.new
          value.members.each do |member|
            result[member] = walk(value[member], pattern, seen, member.to_s)
          end
          result
        rescue StandardError
          # Struct subclasses with mandatory-argument initialisers cannot be
          # rebuilt via .new -- fall back to a string representation rather
          # than crashing the host app.
          value.to_s
        ensure
          seen.delete(obj_id)
        end
      else
        # For other objects, try to convert to string
        value.to_s
      end
    end

    private_class_method :build_key_pattern, :walk
  end
end
|
|
@@ -0,0 +1,340 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "json"
|
|
4
|
+
require "fileutils"
|
|
5
|
+
require "tempfile"
|
|
6
|
+
|
|
7
|
+
module Uncaught
|
|
8
|
+
# Local file transport that writes events to the .uncaught/ directory.
|
|
9
|
+
#
|
|
10
|
+
# Uses atomic writes via Tempfile + File.rename.
|
|
11
|
+
# Updates issues.json index.
|
|
12
|
+
# Optionally writes to SQLite if the sqlite3 gem is available.
|
|
13
|
+
class LocalFileTransport
|
|
14
|
+
# @param config [Configuration]
|
|
15
|
+
def initialize(config)
|
|
16
|
+
@config = config
|
|
17
|
+
@base_dir = config.local_output_dir || File.join(Dir.pwd, ".uncaught")
|
|
18
|
+
@mutex = Mutex.new
|
|
19
|
+
@initialised = false
|
|
20
|
+
end
|
|
21
|
+
|
|
22
|
+
# Send an event to the local file system.
|
|
23
|
+
#
|
|
24
|
+
# @param event [UncaughtEvent]
|
|
25
|
+
def send_event(event)
|
|
26
|
+
ensure_initialised!
|
|
27
|
+
|
|
28
|
+
fp = event.fingerprint
|
|
29
|
+
event_dir = File.join(@base_dir, "events", fp)
|
|
30
|
+
FileUtils.mkdir_p(event_dir)
|
|
31
|
+
|
|
32
|
+
event_hash = event_to_hash(event)
|
|
33
|
+
event_json = JSON.pretty_generate(event_hash)
|
|
34
|
+
|
|
35
|
+
# --- Write timestamped event file (atomic: .tmp -> rename) ----------
|
|
36
|
+
ts = event.timestamp.gsub(/[:.]/, "-")
|
|
37
|
+
event_file = "event-#{ts}.json"
|
|
38
|
+
event_path = File.join(event_dir, event_file)
|
|
39
|
+
atomic_write(event_path, event_json)
|
|
40
|
+
|
|
41
|
+
# --- Write / overwrite latest.json ---------------------------------
|
|
42
|
+
latest_path = File.join(event_dir, "latest.json")
|
|
43
|
+
atomic_write(latest_path, event_json)
|
|
44
|
+
|
|
45
|
+
# --- Write fix-prompt Markdown file --------------------------------
|
|
46
|
+
prompt_file = "#{fp}.md"
|
|
47
|
+
prompt_path = File.join(@base_dir, "fix-prompts", prompt_file)
|
|
48
|
+
atomic_write(prompt_path, event.fix_prompt || "")
|
|
49
|
+
|
|
50
|
+
# --- Update issues.json index -------------------------------------
|
|
51
|
+
update_issues_index(event, event_file, prompt_file)
|
|
52
|
+
|
|
53
|
+
# --- Also write to SQLite -----------------------------------------
|
|
54
|
+
write_to_sqlite(event)
|
|
55
|
+
rescue => e
|
|
56
|
+
# Never crash the host app.
|
|
57
|
+
debug_log("LocalFileTransport#send_event failed: #{e.message}")
|
|
58
|
+
end
|
|
59
|
+
|
|
60
|
+
# Flush queued events (no-op for local file transport).
|
|
61
|
+
def flush
|
|
62
|
+
# Local file transport writes synchronously per-event; nothing to flush.
|
|
63
|
+
end
|
|
64
|
+
|
|
65
|
+
private
|
|
66
|
+
|
|
67
|
+
def ensure_initialised!
|
|
68
|
+
return if @initialised
|
|
69
|
+
|
|
70
|
+
@mutex.synchronize do
|
|
71
|
+
return if @initialised
|
|
72
|
+
|
|
73
|
+
FileUtils.mkdir_p(File.join(@base_dir, "events"))
|
|
74
|
+
FileUtils.mkdir_p(File.join(@base_dir, "fix-prompts"))
|
|
75
|
+
ensure_gitignore
|
|
76
|
+
@initialised = true
|
|
77
|
+
end
|
|
78
|
+
end
|
|
79
|
+
|
|
80
|
+
def ensure_gitignore
|
|
81
|
+
gitignore_path = File.join(Dir.pwd, ".gitignore")
|
|
82
|
+
content = File.exist?(gitignore_path) ? File.read(gitignore_path) : ""
|
|
83
|
+
unless content.include?(".uncaught")
|
|
84
|
+
File.open(gitignore_path, "a") do |f|
|
|
85
|
+
f.write("\n# Uncaught local error store\n.uncaught/\n")
|
|
86
|
+
end
|
|
87
|
+
end
|
|
88
|
+
rescue
|
|
89
|
+
# Non-critical -- swallow.
|
|
90
|
+
end
|
|
91
|
+
|
|
92
|
+
# Atomic write via tempfile + rename.
|
|
93
|
+
def atomic_write(path, content)
|
|
94
|
+
tmp_path = "#{path}.tmp"
|
|
95
|
+
File.write(tmp_path, content, encoding: "UTF-8")
|
|
96
|
+
File.rename(tmp_path, path)
|
|
97
|
+
end
|
|
98
|
+
|
|
99
|
+
# Read, update, and atomically write the issues.json index.
|
|
100
|
+
def update_issues_index(event, event_file, prompt_file)
|
|
101
|
+
@mutex.synchronize do
|
|
102
|
+
index_path = File.join(@base_dir, "issues.json")
|
|
103
|
+
|
|
104
|
+
issues = []
|
|
105
|
+
if File.exist?(index_path)
|
|
106
|
+
begin
|
|
107
|
+
issues = JSON.parse(File.read(index_path))
|
|
108
|
+
rescue
|
|
109
|
+
issues = []
|
|
110
|
+
end
|
|
111
|
+
end
|
|
112
|
+
|
|
113
|
+
user_id = event.user&.id || event.user&.email || "anonymous"
|
|
114
|
+
existing = issues.find { |i| i["fingerprint"] == event.fingerprint }
|
|
115
|
+
|
|
116
|
+
if existing
|
|
117
|
+
existing["count"] = (existing["count"] || 0) + 1
|
|
118
|
+
existing["lastSeen"] = event.timestamp
|
|
119
|
+
existing["latestEventFile"] = event_file
|
|
120
|
+
existing["fixPromptFile"] = prompt_file
|
|
121
|
+
unless (existing["affectedUsers"] || []).include?(user_id)
|
|
122
|
+
existing["affectedUsers"] = (existing["affectedUsers"] || []) + [user_id]
|
|
123
|
+
end
|
|
124
|
+
# Re-open if previously resolved
|
|
125
|
+
if existing["status"] == "resolved"
|
|
126
|
+
existing["status"] = "open"
|
|
127
|
+
end
|
|
128
|
+
else
|
|
129
|
+
issues << {
|
|
130
|
+
"fingerprint" => event.fingerprint,
|
|
131
|
+
"title" => event.error.message,
|
|
132
|
+
"errorType" => event.error.type,
|
|
133
|
+
"count" => 1,
|
|
134
|
+
"affectedUsers" => [user_id],
|
|
135
|
+
"firstSeen" => event.timestamp,
|
|
136
|
+
"lastSeen" => event.timestamp,
|
|
137
|
+
"status" => "open",
|
|
138
|
+
"fixPromptFile" => prompt_file,
|
|
139
|
+
"latestEventFile" => event_file,
|
|
140
|
+
"release" => event.release,
|
|
141
|
+
"environment" => event.environment&.deploy
|
|
142
|
+
}
|
|
143
|
+
end
|
|
144
|
+
|
|
145
|
+
atomic_write(index_path, JSON.pretty_generate(issues))
|
|
146
|
+
end
|
|
147
|
+
end
|
|
148
|
+
|
|
149
|
+
# Attempt to write to SQLite (best-effort).
|
|
150
|
+
def write_to_sqlite(event)
|
|
151
|
+
require "sqlite3"
|
|
152
|
+
db_path = File.join(@base_dir, "uncaught.db")
|
|
153
|
+
db = SQLite3::Database.new(db_path)
|
|
154
|
+
|
|
155
|
+
db.execute(<<~SQL)
|
|
156
|
+
CREATE TABLE IF NOT EXISTS events (
|
|
157
|
+
event_id TEXT PRIMARY KEY,
|
|
158
|
+
timestamp TEXT NOT NULL,
|
|
159
|
+
fingerprint TEXT NOT NULL,
|
|
160
|
+
level TEXT NOT NULL,
|
|
161
|
+
error_type TEXT,
|
|
162
|
+
error_message TEXT,
|
|
163
|
+
stack TEXT,
|
|
164
|
+
fix_prompt TEXT,
|
|
165
|
+
created_at TEXT DEFAULT (datetime('now'))
|
|
166
|
+
)
|
|
167
|
+
SQL
|
|
168
|
+
|
|
169
|
+
db.execute(<<~SQL, [
|
|
170
|
+
event.event_id, event.timestamp, event.fingerprint, event.level,
|
|
171
|
+
event.error.type, event.error.message, event.error.stack,
|
|
172
|
+
event.fix_prompt
|
|
173
|
+
])
|
|
174
|
+
INSERT OR REPLACE INTO events
|
|
175
|
+
(event_id, timestamp, fingerprint, level, error_type, error_message, stack, fix_prompt)
|
|
176
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
|
177
|
+
SQL
|
|
178
|
+
|
|
179
|
+
db.close
|
|
180
|
+
rescue LoadError
|
|
181
|
+
# sqlite3 gem not available -- skip.
|
|
182
|
+
rescue => e
|
|
183
|
+
debug_log("SQLite write failed: #{e.message}")
|
|
184
|
+
end
|
|
185
|
+
|
|
186
|
+
# Convert an UncaughtEvent struct to a JSON-compatible hash using camelCase
|
|
187
|
+
# keys to match the TypeScript SDK output format.
|
|
188
|
+
def event_to_hash(event)
|
|
189
|
+
hash = {
|
|
190
|
+
"eventId" => event.event_id,
|
|
191
|
+
"timestamp" => event.timestamp,
|
|
192
|
+
"level" => event.level,
|
|
193
|
+
"fingerprint" => event.fingerprint,
|
|
194
|
+
"error" => error_to_hash(event.error),
|
|
195
|
+
"breadcrumbs" => (event.breadcrumbs || []).map { |b| breadcrumb_to_hash(b) },
|
|
196
|
+
"fixPrompt" => event.fix_prompt || "",
|
|
197
|
+
"sdk" => sdk_to_hash(event.sdk)
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
hash["projectKey"] = event.project_key if event.project_key
|
|
201
|
+
hash["release"] = event.release if event.release
|
|
202
|
+
hash["request"] = request_to_hash(event.request) if event.request
|
|
203
|
+
hash["operation"] = operation_to_hash(event.operation) if event.operation
|
|
204
|
+
hash["environment"] = environment_to_hash(event.environment) if event.environment
|
|
205
|
+
hash["user"] = user_to_hash(event.user) if event.user
|
|
206
|
+
hash["userFeedback"] = event.user_feedback if event.user_feedback
|
|
207
|
+
|
|
208
|
+
hash
|
|
209
|
+
end
|
|
210
|
+
|
|
211
|
+
def error_to_hash(err)
|
|
212
|
+
return {} unless err
|
|
213
|
+
|
|
214
|
+
h = { "message" => err.message, "type" => err.type }
|
|
215
|
+
h["stack"] = err.stack if err.stack
|
|
216
|
+
h["resolvedStack"] = err.resolved_stack if err.resolved_stack
|
|
217
|
+
h["componentStack"] = err.component_stack if err.component_stack
|
|
218
|
+
h
|
|
219
|
+
end
|
|
220
|
+
|
|
221
|
+
def breadcrumb_to_hash(b)
|
|
222
|
+
return {} unless b
|
|
223
|
+
|
|
224
|
+
h = {
|
|
225
|
+
"type" => b.type,
|
|
226
|
+
"category" => b.category,
|
|
227
|
+
"message" => b.message,
|
|
228
|
+
"timestamp" => b.timestamp
|
|
229
|
+
}
|
|
230
|
+
h["data"] = b.data if b.data
|
|
231
|
+
h["level"] = b.level if b.level
|
|
232
|
+
h
|
|
233
|
+
end
|
|
234
|
+
|
|
235
|
+
def sdk_to_hash(sdk)
|
|
236
|
+
return {} unless sdk
|
|
237
|
+
|
|
238
|
+
{ "name" => sdk.name, "version" => sdk.version }
|
|
239
|
+
end
|
|
240
|
+
|
|
241
|
+
def request_to_hash(req)
|
|
242
|
+
return nil unless req
|
|
243
|
+
|
|
244
|
+
h = {}
|
|
245
|
+
h["method"] = req.method if req.method
|
|
246
|
+
h["url"] = req.url if req.url
|
|
247
|
+
h["headers"] = req.headers if req.headers
|
|
248
|
+
h["body"] = req.body if req.body
|
|
249
|
+
h["query"] = req.query if req.query
|
|
250
|
+
h
|
|
251
|
+
end
|
|
252
|
+
|
|
253
|
+
def operation_to_hash(op)
|
|
254
|
+
return nil unless op
|
|
255
|
+
|
|
256
|
+
h = {}
|
|
257
|
+
h["provider"] = op.provider if op.provider
|
|
258
|
+
h["type"] = op.type if op.type
|
|
259
|
+
h["method"] = op.method if op.method
|
|
260
|
+
h["params"] = op.params if op.params
|
|
261
|
+
h["errorCode"] = op.error_code if op.error_code
|
|
262
|
+
h["errorDetails"] = op.error_details if op.error_details
|
|
263
|
+
h
|
|
264
|
+
end
|
|
265
|
+
|
|
266
|
+
def environment_to_hash(env)
|
|
267
|
+
return nil unless env
|
|
268
|
+
|
|
269
|
+
h = {}
|
|
270
|
+
h["framework"] = env.framework if env.framework
|
|
271
|
+
h["frameworkVersion"] = env.framework_version if env.framework_version
|
|
272
|
+
h["runtime"] = env.runtime if env.runtime
|
|
273
|
+
h["runtimeVersion"] = env.runtime_version if env.runtime_version
|
|
274
|
+
h["platform"] = env.platform if env.platform
|
|
275
|
+
h["os"] = env.os if env.os
|
|
276
|
+
h["browser"] = env.browser if env.browser
|
|
277
|
+
h["browserVersion"] = env.browser_version if env.browser_version
|
|
278
|
+
h["deviceType"] = env.device_type if env.device_type
|
|
279
|
+
h["locale"] = env.locale if env.locale
|
|
280
|
+
h["timezone"] = env.timezone if env.timezone
|
|
281
|
+
h["url"] = env.url if env.url
|
|
282
|
+
h["deploy"] = env.deploy if env.deploy
|
|
283
|
+
h
|
|
284
|
+
end
|
|
285
|
+
|
|
286
|
+
def user_to_hash(usr)
|
|
287
|
+
return nil unless usr
|
|
288
|
+
|
|
289
|
+
h = {}
|
|
290
|
+
h["id"] = usr.id if usr.id
|
|
291
|
+
h["email"] = usr.email if usr.email
|
|
292
|
+
h["username"] = usr.username if usr.username
|
|
293
|
+
h["sessionId"] = usr.session_id if usr.session_id
|
|
294
|
+
h
|
|
295
|
+
end
|
|
296
|
+
|
|
297
|
+
def debug_log(msg)
|
|
298
|
+
$stderr.puts("[uncaught] #{msg}") if @config&.debug
|
|
299
|
+
end
|
|
300
|
+
end
|
|
301
|
+
|
|
302
|
+
# Console transport for development/debugging.
|
|
303
|
+
class ConsoleTransport
|
|
304
|
+
def initialize(config)
|
|
305
|
+
@config = config
|
|
306
|
+
end
|
|
307
|
+
|
|
308
|
+
def send_event(event)
|
|
309
|
+
title = "[uncaught] #{event.error.type}: #{event.error.message}"
|
|
310
|
+
$stderr.puts("--- #{title} ---")
|
|
311
|
+
$stderr.puts("Error: #{event.error.message}")
|
|
312
|
+
$stderr.puts("Stack: #{event.error.stack}") if event.error.stack
|
|
313
|
+
$stderr.puts("Event ID: #{event.event_id}")
|
|
314
|
+
$stderr.puts("Fingerprint: #{event.fingerprint}")
|
|
315
|
+
$stderr.puts("Breadcrumbs: #{event.breadcrumbs.inspect}")
|
|
316
|
+
$stderr.puts("Fix Prompt:\n#{event.fix_prompt}") if event.fix_prompt && !event.fix_prompt.empty?
|
|
317
|
+
$stderr.puts("---")
|
|
318
|
+
rescue
|
|
319
|
+
# Never throw from transport.
|
|
320
|
+
end
|
|
321
|
+
|
|
322
|
+
def flush
|
|
323
|
+
# Nothing to flush for console transport.
|
|
324
|
+
end
|
|
325
|
+
end
|
|
326
|
+
|
|
327
|
+
# Factory method to create the appropriate transport.
|
|
328
|
+
#
|
|
329
|
+
# @param config [Configuration]
|
|
330
|
+
# @return [LocalFileTransport, ConsoleTransport]
|
|
331
|
+
def self.create_transport(config)
|
|
332
|
+
mode = config.transport || "local"
|
|
333
|
+
case mode
|
|
334
|
+
when "console"
|
|
335
|
+
ConsoleTransport.new(config)
|
|
336
|
+
else
|
|
337
|
+
LocalFileTransport.new(config)
|
|
338
|
+
end
|
|
339
|
+
end
|
|
340
|
+
end
|