lapsoss 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +5 -0
- data/LICENSE.txt +21 -0
- data/README.md +855 -0
- data/lib/lapsoss/adapters/appsignal_adapter.rb +136 -0
- data/lib/lapsoss/adapters/base.rb +88 -0
- data/lib/lapsoss/adapters/insight_hub_adapter.rb +190 -0
- data/lib/lapsoss/adapters/logger_adapter.rb +67 -0
- data/lib/lapsoss/adapters/rollbar_adapter.rb +157 -0
- data/lib/lapsoss/adapters/sentry_adapter.rb +197 -0
- data/lib/lapsoss/backtrace_frame.rb +258 -0
- data/lib/lapsoss/backtrace_processor.rb +346 -0
- data/lib/lapsoss/client.rb +115 -0
- data/lib/lapsoss/configuration.rb +310 -0
- data/lib/lapsoss/current.rb +9 -0
- data/lib/lapsoss/event.rb +107 -0
- data/lib/lapsoss/exclusions.rb +429 -0
- data/lib/lapsoss/fingerprinter.rb +217 -0
- data/lib/lapsoss/http_client.rb +79 -0
- data/lib/lapsoss/middleware.rb +353 -0
- data/lib/lapsoss/pipeline.rb +131 -0
- data/lib/lapsoss/railtie.rb +72 -0
- data/lib/lapsoss/registry.rb +114 -0
- data/lib/lapsoss/release_tracker.rb +553 -0
- data/lib/lapsoss/router.rb +36 -0
- data/lib/lapsoss/sampling.rb +332 -0
- data/lib/lapsoss/scope.rb +110 -0
- data/lib/lapsoss/scrubber.rb +170 -0
- data/lib/lapsoss/user_context.rb +355 -0
- data/lib/lapsoss/validators.rb +142 -0
- data/lib/lapsoss/version.rb +5 -0
- data/lib/lapsoss.rb +76 -0
- metadata +217 -0
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require "singleton"
+require "concurrent"
+require_relative "adapters/base"
+require_relative "adapters/logger_adapter"
+require_relative "adapters/sentry_adapter"
+require_relative "adapters/appsignal_adapter"
+require_relative "adapters/rollbar_adapter"
+require_relative "adapters/insight_hub_adapter"
+
+module Lapsoss
+  class Registry
+    include Singleton
+
+    class AdapterNotFoundError < StandardError; end
+    class DuplicateAdapterError < StandardError; end
+
+    def initialize
+      @adapters = Concurrent::Map.new
+    end
+
+    # Register a named adapter instance
+    #
+    # @param name [Symbol] Unique identifier for this adapter instance
+    # @param type [Symbol] The adapter type (e.g., :sentry, :appsignal)
+    # @param settings [Hash] Configuration for the adapter
+    # @return [Adapter] The registered adapter instance
+    def register(name, type, **settings)
+      name = name.to_sym
+
+      # Check if adapter already exists
+      raise DuplicateAdapterError, "Adapter '#{name}' already registered" if @adapters.key?(name)
+
+      adapter_class = resolve_adapter_class(type)
+      adapter = adapter_class.new(name, settings)
+      @adapters[name] = adapter
+      adapter
+    end
+
+    # Unregister an adapter
+    #
+    # @param name [Symbol] The adapter name to remove
+    def unregister(name)
+      adapter = @adapters.delete(name.to_sym)
+      adapter&.shutdown if adapter&.respond_to?(:shutdown)
+      adapter
+    end
+
+    # Get a specific adapter by name
+    #
+    # @param name [Symbol] The adapter name
+    # @return [Adapter, nil] The adapter instance or nil
+    def [](name)
+      @adapters[name.to_sym]
+    end
+
+    # Get all registered adapters
+    #
+    # @return [Array<Adapter>] All adapter instances
+    def all
+      @adapters.values
+    end
+
+    # Get all active (enabled) adapters
+    #
+    # @return [Array<Adapter>] Active adapter instances
+    def active
+      @adapters.values.select(&:enabled?)
+    end
+
+    # Check if an adapter is registered
+    #
+    # @param name [Symbol] The adapter name
+    # @return [Boolean]
+    def registered?(name)
+      @adapters.key?(name.to_sym)
+    end
+
+    # Clear all adapters
+    def clear!
+      @adapters.values.each do |adapter|
+        adapter.shutdown if adapter.respond_to?(:shutdown)
+      end
+      @adapters.clear
+    end
+
+    # Get adapter names
+    #
+    # @return [Array<Symbol>] Registered adapter names
+    def names
+      @adapters.keys
+    end
+
+    private
+
+    # Resolve adapter type to class
+    def resolve_adapter_class(type)
+      adapter_map[type.to_sym] || raise(AdapterNotFoundError, "Unknown adapter type: #{type}")
+    end
+
+    # Map of adapter types to classes
+    def adapter_map
+      {
+        logger: Adapters::LoggerAdapter,
+        sentry: Adapters::SentryAdapter,
+        appsignal: Adapters::AppsignalAdapter,
+        rollbar: Adapters::RollbarAdapter,
+        insight_hub: Adapters::InsightHubAdapter,
+        bugsnag: Adapters::InsightHubAdapter # Backwards compatibility
+      }
+    end
+  end
+end
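
Based on the Registry API in the hunk above (its 114 lines match data/lib/lapsoss/registry.rb in the file list), here is a minimal usage sketch. The :primary_sentry name and the dsn: setting are illustrative placeholders, not values documented by the gem:

    require "lapsoss"

    registry = Lapsoss::Registry.instance

    # Register a named Sentry adapter instance; the settings hash is passed to the adapter class.
    registry.register(:primary_sentry, :sentry, dsn: ENV["SENTRY_DSN"])

    registry.registered?(:primary_sentry) # => true
    registry.names                        # => [:primary_sentry]
    registry.active                       # => only adapters whose #enabled? returns true

    # Registering the same name twice raises Registry::DuplicateAdapterError,
    # and an unknown type raises Registry::AdapterNotFoundError.
    registry.unregister(:primary_sentry)  # calls #shutdown on the adapter if it responds to it
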
@@ -0,0 +1,553 @@
+# frozen_string_literal: true
+
+require "digest"
+
+module Lapsoss
+  # Release and version tracking system
+  class ReleaseTracker
+    def initialize(configuration = {})
+      @version_providers = configuration[:version_providers] || []
+      @git_enabled = configuration[:git_enabled] != false
+      @environment_enabled = configuration[:environment_enabled] != false
+      @deployment_enabled = configuration[:deployment_enabled] != false
+      @cache_duration = configuration[:cache_duration] || 300 # 5 minutes
+      @cached_release_info = nil
+      @cache_timestamp = nil
+    end
+
+    def get_release_info
+      now = Time.now
+
+      # Return cached info if still valid
+      if @cached_release_info && @cache_timestamp && (now - @cache_timestamp) < @cache_duration
+        return @cached_release_info
+      end
+
+      # Build fresh release info
+      release_info = {}
+
+      # Add custom version providers
+      @version_providers.each do |provider|
+        begin
+          if provider_info = provider.call
+            release_info.merge!(provider_info)
+          end
+        rescue StandardError => e
+          warn "Release provider failed: #{e.message}"
+        end
+      end
+
+      # Add Git information
+      if @git_enabled
+        if git_info = detect_git_info
+          release_info.merge!(git_info)
+        end
+      end
+
+      # Add environment information
+      if @environment_enabled
+        if env_info = detect_environment_info
+          release_info.merge!(env_info)
+        end
+      end
+
+      # Add deployment information
+      if @deployment_enabled
+        if deployment_info = detect_deployment_info
+          release_info.merge!(deployment_info)
+        end
+      end
+
+      # Generate release ID if not provided
+      release_info[:release_id] ||= generate_release_id(release_info)
+
+      # Cache the result
+      @cached_release_info = release_info
+      @cache_timestamp = now
+
+      release_info
+    end
+
+    def add_version_provider(&block)
+      @version_providers << block
+    end
+
+    def clear_cache
+      @cached_release_info = nil
+      @cache_timestamp = nil
+    end
+
+    private
+
+    def detect_git_info
+      return nil unless File.exist?(".git")
+
+      git_info = {}
+
+      begin
+        # Get current commit SHA
+        commit_sha = execute_git_command("rev-parse HEAD")
+        git_info[:commit_sha] = commit_sha if commit_sha
+
+        # Get short commit SHA
+        short_sha = execute_git_command("rev-parse --short HEAD")
+        git_info[:short_sha] = short_sha if short_sha
+
+        # Get branch name
+        branch = execute_git_command("rev-parse --abbrev-ref HEAD")
+        git_info[:branch] = branch if branch && branch != "HEAD"
+
+        # Get commit timestamp
+        commit_timestamp = execute_git_command("log -1 --format=%ct")
+        if commit_timestamp && !commit_timestamp.empty?
+          git_info[:commit_timestamp] = Time.at(commit_timestamp.to_i)
+        end
+
+        # Get commit message
+        commit_message = execute_git_command("log -1 --format=%s")
+        git_info[:commit_message] = commit_message if commit_message
+
+        # Get committer info
+        committer = execute_git_command("log -1 --format=%cn")
+        git_info[:committer] = committer if committer
+
+        # Get tag if on a tag
+        tag = execute_git_command("describe --exact-match --tags HEAD 2>/dev/null")
+        git_info[:tag] = tag if tag && !tag.empty?
+
+        # Get latest tag
+        latest_tag = execute_git_command("describe --tags --abbrev=0 2>/dev/null")
+        git_info[:latest_tag] = latest_tag if latest_tag && !latest_tag.empty?
+
+        # Get commits since latest tag
+        if latest_tag
+          commits_since_tag = execute_git_command("rev-list #{latest_tag}..HEAD --count")
+          git_info[:commits_since_tag] = commits_since_tag.to_i if commits_since_tag
+        end
+
+        # Check if working directory is dirty
+        git_status = execute_git_command("status --porcelain")
+        git_info[:dirty] = !git_status.empty? if git_status
+
+        # Get remote URL
+        remote_url = execute_git_command("config --get remote.origin.url")
+        if remote_url
+          git_info[:remote_url] = sanitize_remote_url(remote_url)
+        end
+
+        git_info
+      rescue StandardError => e
+        warn "Failed to detect Git info: #{e.message}"
+        nil
+      end
+    end
+
+    def detect_environment_info
+      env_info = {}
+
+      # Application version from common environment variables
+      env_info[:app_version] = ENV["APP_VERSION"] if ENV["APP_VERSION"]
+      env_info[:version] = ENV["VERSION"] if ENV["VERSION"]
+
+      # Environment detection
+      env_info[:environment] = detect_environment
+
+      # Application name
+      env_info[:app_name] = ENV["APP_NAME"] if ENV["APP_NAME"]
+
+      # Build information
+      env_info[:build_number] = ENV["BUILD_NUMBER"] if ENV["BUILD_NUMBER"]
+      env_info[:build_id] = ENV["BUILD_ID"] if ENV["BUILD_ID"]
+      env_info[:build_url] = ENV["BUILD_URL"] if ENV["BUILD_URL"]
+
+      # CI/CD information
+      env_info[:ci] = detect_ci_info
+
+      env_info.compact
+    end
+
+    def detect_deployment_info
+      deployment_info = {}
+
+      # Deployment timestamp
+      if ENV["DEPLOYMENT_TIME"]
+        deployment_info[:deployment_time] = parse_time(ENV["DEPLOYMENT_TIME"])
+      elsif ENV["DEPLOYED_AT"]
+        deployment_info[:deployment_time] = parse_time(ENV["DEPLOYED_AT"])
+      end
+
+      # Deployment ID
+      deployment_info[:deployment_id] = ENV["DEPLOYMENT_ID"] if ENV["DEPLOYMENT_ID"]
+
+      # Platform-specific detection
+      deployment_info.merge!(detect_heroku_info)
+      deployment_info.merge!(detect_aws_info)
+      deployment_info.merge!(detect_gcp_info)
+      deployment_info.merge!(detect_azure_info)
+      deployment_info.merge!(detect_docker_info)
+      deployment_info.merge!(detect_kubernetes_info)
+
+      deployment_info.compact
+    end
+
+    def detect_environment
+      return ENV["RAILS_ENV"] if ENV["RAILS_ENV"]
+      return ENV["RACK_ENV"] if ENV["RACK_ENV"]
+      return ENV["NODE_ENV"] if ENV["NODE_ENV"]
+      return ENV["ENVIRONMENT"] if ENV["ENVIRONMENT"]
+      return ENV["ENV"] if ENV["ENV"]
+
+      # Try to detect from Rails if available
+      if defined?(Rails) && Rails.respond_to?(:env)
+        return Rails.env.to_s
+      end
+
+      # Default fallback
+      "unknown"
+    end
+
+    def detect_ci_info
+      ci_info = {}
+
+      # GitHub Actions
+      if ENV["GITHUB_ACTIONS"]
+        ci_info[:provider] = "github_actions"
+        ci_info[:run_id] = ENV["GITHUB_RUN_ID"]
+        ci_info[:run_number] = ENV["GITHUB_RUN_NUMBER"]
+        ci_info[:workflow] = ENV["GITHUB_WORKFLOW"]
+        ci_info[:actor] = ENV["GITHUB_ACTOR"]
+        ci_info[:repository] = ENV["GITHUB_REPOSITORY"]
+        ci_info[:ref] = ENV["GITHUB_REF"]
+        ci_info[:sha] = ENV["GITHUB_SHA"]
+      end
+
+      # GitLab CI
+      if ENV["GITLAB_CI"]
+        ci_info[:provider] = "gitlab_ci"
+        ci_info[:pipeline_id] = ENV["CI_PIPELINE_ID"]
+        ci_info[:job_id] = ENV["CI_JOB_ID"]
+        ci_info[:job_name] = ENV["CI_JOB_NAME"]
+        ci_info[:commit_sha] = ENV["CI_COMMIT_SHA"]
+        ci_info[:commit_ref] = ENV["CI_COMMIT_REF_NAME"]
+        ci_info[:project_url] = ENV["CI_PROJECT_URL"]
+      end
+
+      # Jenkins
+      if ENV["JENKINS_URL"]
+        ci_info[:provider] = "jenkins"
+        ci_info[:build_number] = ENV["BUILD_NUMBER"]
+        ci_info[:build_id] = ENV["BUILD_ID"]
+        ci_info[:job_name] = ENV["JOB_NAME"]
+        ci_info[:build_url] = ENV["BUILD_URL"]
+        ci_info[:git_commit] = ENV["GIT_COMMIT"]
+        ci_info[:git_branch] = ENV["GIT_BRANCH"]
+      end
+
+      # CircleCI
+      if ENV["CIRCLECI"]
+        ci_info[:provider] = "circleci"
+        ci_info[:build_num] = ENV["CIRCLE_BUILD_NUM"]
+        ci_info[:workflow_id] = ENV["CIRCLE_WORKFLOW_ID"]
+        ci_info[:job] = ENV["CIRCLE_JOB"]
+        ci_info[:project_reponame] = ENV["CIRCLE_PROJECT_REPONAME"]
+        ci_info[:sha1] = ENV["CIRCLE_SHA1"]
+        ci_info[:branch] = ENV["CIRCLE_BRANCH"]
+      end
+
+      # Travis CI
+      if ENV["TRAVIS"]
+        ci_info[:provider] = "travis"
+        ci_info[:build_id] = ENV["TRAVIS_BUILD_ID"]
+        ci_info[:build_number] = ENV["TRAVIS_BUILD_NUMBER"]
+        ci_info[:job_id] = ENV["TRAVIS_JOB_ID"]
+        ci_info[:commit] = ENV["TRAVIS_COMMIT"]
+        ci_info[:branch] = ENV["TRAVIS_BRANCH"]
+        ci_info[:tag] = ENV["TRAVIS_TAG"]
+      end
+
+      ci_info
+    end
+
+    def detect_heroku_info
+      return {} unless ENV["HEROKU_APP_NAME"]
+
+      {
+        platform: "heroku",
+        app_name: ENV["HEROKU_APP_NAME"],
+        dyno: ENV["DYNO"],
+        slug_commit: ENV["HEROKU_SLUG_COMMIT"],
+        release_version: ENV["HEROKU_RELEASE_VERSION"],
+        slug_description: ENV["HEROKU_SLUG_DESCRIPTION"]
+      }
+    end
+
+    def detect_aws_info
+      info = {}
+
+      if ENV["AWS_EXECUTION_ENV"]
+        info[:platform] = "aws"
+        info[:execution_env] = ENV["AWS_EXECUTION_ENV"]
+        info[:region] = ENV["AWS_REGION"] || ENV["AWS_DEFAULT_REGION"]
+        info[:function_name] = ENV["AWS_LAMBDA_FUNCTION_NAME"]
+        info[:function_version] = ENV["AWS_LAMBDA_FUNCTION_VERSION"]
+      end
+
+      # EC2 metadata (if available)
+      if ENV["EC2_INSTANCE_ID"]
+        info[:platform] = "aws_ec2"
+        info[:instance_id] = ENV["EC2_INSTANCE_ID"]
+        info[:instance_type] = ENV["EC2_INSTANCE_TYPE"]
+        info[:availability_zone] = ENV["EC2_AVAILABILITY_ZONE"]
+      end
+
+      info
+    end
+
+    def detect_gcp_info
+      info = {}
+
+      if ENV["GOOGLE_CLOUD_PROJECT"]
+        info[:platform] = "gcp"
+        info[:project] = ENV["GOOGLE_CLOUD_PROJECT"]
+        info[:region] = ENV["GOOGLE_CLOUD_REGION"]
+        info[:function_name] = ENV["FUNCTION_NAME"]
+        info[:function_signature_type] = ENV["FUNCTION_SIGNATURE_TYPE"]
+      end
+
+      # App Engine
+      if ENV["GAE_APPLICATION"]
+        info[:platform] = "gcp_app_engine"
+        info[:application] = ENV["GAE_APPLICATION"]
+        info[:service] = ENV["GAE_SERVICE"]
+        info[:version] = ENV["GAE_VERSION"]
+        info[:runtime] = ENV["GAE_RUNTIME"]
+      end
+
+      info
+    end
+
+    def detect_azure_info
+      info = {}
+
+      if ENV["WEBSITE_SITE_NAME"]
+        info[:platform] = "azure"
+        info[:site_name] = ENV["WEBSITE_SITE_NAME"]
+        info[:resource_group] = ENV["WEBSITE_RESOURCE_GROUP"]
+        info[:subscription_id] = ENV["WEBSITE_OWNER_NAME"]
+        info[:sku] = ENV["WEBSITE_SKU"]
+      end
+
+      info
+    end
+
+    def detect_docker_info
+      info = {}
+
+      if ENV["DOCKER_CONTAINER_ID"] || File.exist?("/.dockerenv")
+        info[:platform] = "docker"
+        info[:container_id] = ENV["DOCKER_CONTAINER_ID"]
+        info[:image] = ENV["DOCKER_IMAGE"]
+        info[:tag] = ENV["DOCKER_TAG"]
+      end
+
+      info
+    end
+
+    def detect_kubernetes_info
+      info = {}
+
+      if ENV["KUBERNETES_SERVICE_HOST"]
+        info[:platform] = "kubernetes"
+        info[:namespace] = ENV["KUBERNETES_NAMESPACE"]
+        info[:pod_name] = ENV["HOSTNAME"]
+        info[:service_account] = ENV["KUBERNETES_SERVICE_ACCOUNT"]
+        info[:cluster_name] = ENV["CLUSTER_NAME"]
+        info[:node_name] = ENV["NODE_NAME"]
+      end
+
+      info
+    end
+
+    def execute_git_command(command)
+      result = `git #{command} 2>/dev/null`.strip
+      result.empty? ? nil : result
+    rescue StandardError
+      nil
+    end
+
+    def sanitize_remote_url(url)
+      # Remove credentials from Git URLs
+      url.gsub(%r{://[^@/]+@}, "://")
+    end
+
+    def parse_time(time_str)
+      return nil unless time_str
+
+      # Try different time formats
+      formats = [
+        "%Y-%m-%dT%H:%M:%S%z", # ISO 8601 with timezone
+        "%Y-%m-%dT%H:%M:%SZ", # ISO 8601 UTC
+        "%Y-%m-%d %H:%M:%S %z", # Standard format with timezone
+        "%Y-%m-%d %H:%M:%S", # Standard format without timezone
+        "%s" # Unix timestamp
+      ]
+
+      formats.each do |format|
+        begin
+          return Time.strptime(time_str, format)
+        rescue ArgumentError
+          next
+        end
+      end
+
+      # Try parsing as integer (Unix timestamp)
+      begin
+        return Time.at(time_str.to_i) if time_str.match?(/^\d+$/)
+      rescue ArgumentError
+        nil
+      end
+
+      nil
+    end
+
+    def generate_release_id(release_info)
+      # Generate a unique release ID based on available information
+      components = []
+
+      # Prioritize version-like information
+      components << release_info[:app_version] if release_info[:app_version]
+      components << release_info[:version] if release_info[:version]
+      components << release_info[:tag] if release_info[:tag]
+      components << release_info[:short_sha] if release_info[:short_sha]
+      components << release_info[:commit_sha] if release_info[:commit_sha] && components.empty?
+
+      # Add environment if available
+      components << release_info[:environment] if release_info[:environment]
+
+      # Add deployment ID if available
+      components << release_info[:deployment_id] if release_info[:deployment_id]
+
+      # If we have components, join them
+      if components.any?
+        release_id = components.join("-")
+        # Truncate if too long
+        release_id.length > 64 ? release_id[0, 64] : release_id
+      else
+        # Generate hash from all available info
+        info_string = release_info.to_s
+        Digest::SHA256.hexdigest(info_string)[0, 8]
+      end
+    end
+  end
+
+  # Built-in release providers for common scenarios
+  class ReleaseProviders
+    def self.from_file(file_path)
+      lambda do
+        return nil unless File.exist?(file_path)
+
+        content = File.read(file_path).strip
+        return nil if content.empty?
+
+        # Try to parse as JSON first
+        begin
+          JSON.parse(content)
+        rescue JSON::ParserError
+          # Treat as plain text version
+          { version: content }
+        end
+      end
+    end
+
+    def self.from_ruby_constant(constant_name)
+      lambda do
+        begin
+          constant = Object.const_get(constant_name)
+          { version: constant.to_s }
+        rescue NameError
+          nil
+        end
+      end
+    end
+
+    def self.from_gemfile_lock
+      lambda do
+        return nil unless File.exist?("Gemfile.lock")
+
+        content = File.read("Gemfile.lock")
+
+        # Extract gems with versions
+        gems = {}
+        content.scan(/^\s{4}(\w+)\s+\(([^)]+)\)/).each do |name, version|
+          gems[name] = version
+        end
+
+        { gems: gems }
+      end
+    end
+
+    def self.from_package_json
+      lambda do
+        return nil unless File.exist?("package.json")
+
+        begin
+          package_info = JSON.parse(File.read("package.json"))
+          {
+            version: package_info["version"],
+            name: package_info["name"],
+            dependencies: package_info["dependencies"]&.keys
+          }.compact
+        rescue JSON::ParserError
+          nil
+        end
+      end
+    end
+
+    def self.from_rails_application
+      lambda do
+        return nil unless defined?(Rails) && Rails.respond_to?(:application)
+
+        app = Rails.application
+        return nil unless app
+
+        info = {
+          rails_version: Rails.version,
+          environment: Rails.env,
+          root: Rails.root.to_s
+        }
+
+        # Get application version if defined
+        if app.class.respond_to?(:version)
+          info[:app_version] = app.class.version
+        end
+
+        # Get application name
+        if app.class.respond_to?(:name)
+          info[:app_name] = app.class.name
+        end
+
+        info
+      end
+    end
+
+    def self.from_capistrano
+      lambda do
+        # Check for Capistrano deployment files
+        %w[REVISION current/REVISION].each do |file|
+          next unless File.exist?(file)
+
+          revision = File.read(file).strip
+          next if revision.empty?
+
+          return {
+            revision: revision,
+            deployed_at: File.mtime(file),
+            deployment_method: "capistrano"
+          }
+        end
+
+        nil
+      end
+    end
+  end
+end
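
A usage sketch for Lapsoss::ReleaseTracker and the built-in Lapsoss::ReleaseProviders shown above (the 553-line hunk matches data/lib/lapsoss/release_tracker.rb in the file list). MyApp::VERSION and the "VERSION" file path are hypothetical caller-supplied values, not part of the gem:

    tracker = Lapsoss::ReleaseTracker.new(
      git_enabled: true,
      cache_duration: 600 # override the default 300-second cache
    )

    # Custom providers are blocks that return a Hash, merged into the release info.
    tracker.add_version_provider { { app_version: MyApp::VERSION } } # MyApp::VERSION is hypothetical

    # Built-in providers return lambdas, so they can be passed as the block:
    tracker.add_version_provider(&Lapsoss::ReleaseProviders.from_file("VERSION"))

    info = tracker.get_release_info
    info[:release_id] # e.g. "1.2.3-abc1234-production" joined from version-like components,
                      # or an 8-character SHA256 prefix when no such components are available
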
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Lapsoss
+  class Router
+    class << self
+      # Processes an event by dispatching it to all active adapters.
+      # The actual dispatch (sync/async) is handled by the Client.
+      #
+      # @param event [Lapsoss::Event] The event to process.
+      def process_event(event)
+        Registry.instance.active.each do |adapter|
+          begin
+            adapter.capture(event)
+          rescue => e
+            handle_adapter_error(adapter, event, e)
+          end
+        end
+      end
+
+      private
+
+      # Handle adapter errors gracefully
+      def handle_adapter_error(adapter, event, error)
+        return unless Lapsoss.configuration.logger
+
+        Lapsoss.configuration.logger.error(
+          "[Lapsoss] Adapter '#{adapter.name}' failed to capture event (type: #{event.type}): #{error.message}"
+        )
+
+        # Call error handler if configured
+        handler = Lapsoss.configuration.error_handler
+        handler&.call(adapter, event, error)
+      end
+    end
+  end
+end
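
Router#handle_adapter_error relies on Lapsoss.configuration exposing a logger and an error_handler. The sketch below assumes the gem's Configuration (data/lib/lapsoss/configuration.rb) is set up through a Lapsoss.configure block; that block form and the StatsD call are assumptions for illustration only:

    require "logger"

    Lapsoss.configure do |config| # assumed configuration entry point, not confirmed by this diff
      config.logger = Logger.new($stdout)

      # Invoked by Router#handle_adapter_error when an adapter's #capture raises.
      config.error_handler = lambda do |adapter, event, error|
        StatsD.increment("lapsoss.adapter_failure", tags: ["adapter:#{adapter.name}"]) # illustrative metric
      end
    end

With a logger and error handler in place, a failing adapter is logged and reported without stopping the remaining active adapters from receiving the event.
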