sumologic-query 1.1.0 → 1.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +105 -200
- data/bin/sumo-query +5 -101
- data/lib/sumologic/cli.rb +208 -0
- data/lib/sumologic/client.rb +44 -200
- data/lib/sumologic/configuration.rb +55 -0
- data/lib/sumologic/http/authenticator.rb +20 -0
- data/lib/sumologic/http/client.rb +80 -0
- data/lib/sumologic/metadata/collector.rb +33 -0
- data/lib/sumologic/metadata/source.rb +72 -0
- data/lib/sumologic/search/job.rb +68 -0
- data/lib/sumologic/search/paginator.rb +67 -0
- data/lib/sumologic/search/poller.rb +80 -0
- data/lib/sumologic/version.rb +1 -1
- data/lib/sumologic.rb +33 -2
- metadata +25 -2
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Sumologic
  module Metadata
    # Read-side wrapper around the Sumo Logic source-listing endpoints.
    #
    # Collaborators (injected):
    #   http_client      - responds to #request(method:, path:) returning parsed JSON
    #   collector_client - responds to #list returning an array of collector hashes
    class Source
      def initialize(http_client:, collector_client:)
        @http = http_client
        @collector_client = collector_client
      end

      # List sources for a specific collector.
      #
      # Returns the raw array of source hashes from the API (empty when none).
      # Raises Sumologic::Error when the underlying request fails.
      def list(collector_id:)
        payload = @http.request(
          method: :get,
          path: "/collectors/#{collector_id}/sources"
        )

        found = payload['sources'] || []
        log_info "Found #{found.size} sources for collector #{collector_id}"
        found
      rescue StandardError => e
        raise Error, "Failed to list sources for collector #{collector_id}: #{e.message}"
      end

      # List sources across every live collector.
      #
      # Returns an array of { 'collector' => {...}, 'sources' => [...] } hashes.
      # A failure for a single collector is logged and that collector is
      # skipped; only a failure to enumerate the collectors themselves raises.
      def list_all
        grouped = @collector_client.list.each_with_object([]) do |collector, acc|
          next unless collector['alive'] # offline collectors are skipped entirely

          id = collector['id']
          name = collector['name']

          log_info "Fetching sources for collector: #{name} (#{id})"

          acc << {
            'collector' => {
              'id' => id,
              'name' => name,
              'collectorType' => collector['collectorType']
            },
            'sources' => list(collector_id: id)
          }
        rescue StandardError => e
          log_error "Failed to fetch sources for collector #{name}: #{e.message}"
        end

        log_info "Total: #{grouped.size} collectors with sources"
        grouped
      rescue StandardError => e
        raise Error, "Failed to list all sources: #{e.message}"
      end

      private

      # Debug-only logging, enabled via SUMO_DEBUG or ruby -d.
      def log_info(message)
        warn "[Sumologic::Metadata::Source] #{message}" if ENV['SUMO_DEBUG'] || $DEBUG
      end

      # Per-collector failures are always surfaced on stderr.
      def log_error(message)
        warn "[Sumologic::Metadata::Source ERROR] #{message}"
      end
    end
  end
end
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Sumologic
  module Search
    # Manages the full search-job lifecycle: create the job, poll it to
    # completion, page through its messages, then delete it.
    class Job
      def initialize(http_client:, config:)
        @http = http_client
        @config = config
        @poller = Poller.new(http_client: http_client, config: config)
        @paginator = Paginator.new(http_client: http_client, config: config)
      end

      # Execute a complete search workflow.
      #
      # query     - Sumo Logic query string
      # from_time - search window start (per the Search Job API)
      # to_time   - search window end
      # time_zone - time zone name for the query (default 'UTC')
      # limit     - optional maximum number of messages to fetch
      #
      # Returns the array of message hashes.
      # Raises Sumologic::TimeoutError when polling times out and
      # Sumologic::Error for other failures. The job is deleted (best effort)
      # on both success and failure.
      def execute(query:, from_time:, to_time:, time_zone: 'UTC', limit: nil)
        job_id = create(query, from_time, to_time, time_zone)
        @poller.poll(job_id)
        messages = @paginator.fetch_all(job_id, limit: limit)
        delete(job_id)
        messages
      rescue Error
        # Bug fix: library errors (notably TimeoutError from the poller) were
        # previously re-wrapped into the base Error class, so callers could not
        # rescue the specific subclass. Clean up and re-raise them unchanged.
        delete(job_id) if job_id
        raise
      rescue StandardError => e
        delete(job_id) if job_id
        raise Error, "Search failed: #{e.message}"
      end

      private

      # POST /search/jobs; returns the new job id or raises Error.
      def create(query, from_time, to_time, time_zone)
        data = @http.request(
          method: :post,
          path: '/search/jobs',
          body: {
            query: query,
            from: from_time,
            to: to_time,
            timeZone: time_zone
          }
        )

        raise Error, "Failed to create job: #{data['message']}" unless data['id']

        log_info "Created search job: #{data['id']}"
        data['id']
      end

      # DELETE the job. Failures are logged, never raised — this runs on the
      # cleanup path and must not mask the primary error.
      def delete(job_id)
        return unless job_id

        @http.request(
          method: :delete,
          path: "/search/jobs/#{job_id}"
        )
        log_info "Deleted search job: #{job_id}"
      rescue StandardError => e
        log_error "Failed to delete job #{job_id}: #{e.message}"
      end

      # Debug-only logging, enabled via SUMO_DEBUG or ruby -d.
      def log_info(message)
        warn "[Sumologic::Search::Job] #{message}" if ENV['SUMO_DEBUG'] || $DEBUG
      end

      def log_error(message)
        warn "[Sumologic::Search::Job ERROR] #{message}"
      end
    end
  end
end
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Sumologic
  module Search
    # Handles paginated fetching of search job messages.
    #
    # The offset into the job's results always equals the number of messages
    # collected so far, so a single accumulator drives the pagination.
    class Paginator
      def initialize(http_client:, config:)
        @http = http_client
        @config = config
      end

      # Fetch all messages for a job with automatic pagination.
      #
      # job_id - id of a completed search job
      # limit  - optional cap on the total number of messages returned
      #
      # Returns the array of message objects.
      def fetch_all(job_id, limit: nil)
        collected = []

        loop do
          room = calculate_batch_limit(limit, collected.size)
          break unless room.positive?

          batch = fetch_batch(job_id, collected.size, room)
          collected.concat(batch)
          log_progress(batch.size, collected.size)

          break if batch.size < room               # short batch => no more messages
          break if limit && collected.size >= limit # user-requested cap reached
        end

        collected
      end

      private

      # Size of the next request: the per-request cap, shrunk so we never
      # fetch past the user-supplied limit.
      def calculate_batch_limit(user_limit, total_fetched)
        cap = @config.max_messages_per_request
        user_limit ? [cap, user_limit - total_fetched].min : cap
      end

      # GET one page of messages starting at `offset`.
      def fetch_batch(job_id, offset, limit)
        payload = @http.request(
          method: :get,
          path: "/search/jobs/#{job_id}/messages",
          query_params: { offset: offset, limit: limit }
        )

        payload['messages'] || []
      end

      def log_progress(batch_size, total)
        log_info "Fetched #{batch_size} messages (total: #{total})"
      end

      # Debug-only logging, enabled via SUMO_DEBUG or ruby -d.
      def log_info(message)
        warn "[Sumologic::Search::Paginator] #{message}" if ENV['SUMO_DEBUG'] || $DEBUG
      end
    end
  end
end
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Sumologic
  module Search
    # Handles adaptive polling of search jobs with exponential backoff.
    class Poller
      def initialize(http_client:, config:)
        @http = http_client
        @config = config
      end

      # Poll until the job completes, is cancelled/paused, or times out.
      #
      # Returns the final job-status hash on completion.
      # Raises Sumologic::TimeoutError after @config.timeout seconds and
      # Sumologic::Error when the job is cancelled or force-paused.
      def poll(job_id)
        started = Time.now
        wait = @config.initial_poll_interval
        attempts = 0

        loop do
          check_timeout!(started)

          status = fetch_job_status(job_id)
          state = status['state']
          log_poll_status(state, status, wait, attempts)

          case state
          when 'DONE GATHERING RESULTS'
            log_completion(started, attempts)
            return status
          when 'CANCELLED', 'FORCE PAUSED'
            raise Error, "Search job #{state.downcase}"
          end

          sleep wait
          attempts += 1
          wait = calculate_next_interval(wait)
        end
      end

      private

      # Raise TimeoutError once the configured wall-clock budget is exhausted.
      def check_timeout!(start_time)
        return if Time.now - start_time <= @config.timeout

        raise TimeoutError, "Search job timed out after #{@config.timeout} seconds"
      end

      # GET the current status document for the job.
      def fetch_job_status(job_id)
        @http.request(
          method: :get,
          path: "/search/jobs/#{job_id}"
        )
      end

      # Exponential backoff: grow the interval each poll, capped at the
      # configured maximum, so long-running jobs are polled less aggressively.
      def calculate_next_interval(current_interval)
        [current_interval * @config.poll_backoff_factor, @config.max_poll_interval].min
      end

      def log_poll_status(state, data, interval, count)
        log_info "Job state: #{state} (#{data['messageCount']} messages, #{data['recordCount']} records) " \
                 "[interval: #{interval}s, poll: #{count}]"
      end

      def log_completion(start_time, poll_count)
        log_info "Job completed in #{(Time.now - start_time).round(1)} seconds after #{poll_count + 1} polls"
      end

      # Debug-only logging, enabled via SUMO_DEBUG or ruby -d.
      def log_info(message)
        warn "[Sumologic::Search::Poller] #{message}" if ENV['SUMO_DEBUG'] || $DEBUG
      end
    end
  end
end
|
data/lib/sumologic/version.rb
CHANGED
data/lib/sumologic.rb
CHANGED
|
@@ -1,10 +1,41 @@
|
|
|
1
1
|
# frozen_string_literal: true

# Entry point for the sumologic-query library. Defines the error hierarchy
# first (the files loaded below reference it), then loads each layer in
# dependency order, and finally the optional Thor-based CLI.

require_relative 'sumologic/version'

module Sumologic
  # Base error class for all Sumologic errors
  class Error < StandardError; end

  # Authentication-related errors
  class AuthenticationError < Error; end

  # Timeout errors during search job execution
  class TimeoutError < Error; end
end

# Load configuration first
require_relative 'sumologic/configuration'

# Load HTTP layer
require_relative 'sumologic/http/authenticator'
require_relative 'sumologic/http/client'

# Load search domain
require_relative 'sumologic/search/poller'
require_relative 'sumologic/search/paginator'
require_relative 'sumologic/search/job'

# Load metadata domain
require_relative 'sumologic/metadata/collector'
require_relative 'sumologic/metadata/source'

# Load main client (facade)
require_relative 'sumologic/client'

# Load CLI (requires thor gem)
begin
  require 'thor'
  require_relative 'sumologic/cli'
rescue LoadError
  # Thor not available - CLI won't work but library will
end
|
metadata
CHANGED
|
@@ -1,14 +1,14 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: sumologic-query
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 1.1.
|
|
4
|
+
version: 1.1.2
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- patrick204nqh
|
|
8
8
|
autorequire:
|
|
9
9
|
bindir: bin
|
|
10
10
|
cert_chain: []
|
|
11
|
-
date: 2025-11-
|
|
11
|
+
date: 2025-11-14 00:00:00.000000000 Z
|
|
12
12
|
dependencies:
|
|
13
13
|
- !ruby/object:Gem::Dependency
|
|
14
14
|
name: base64
|
|
@@ -24,6 +24,20 @@ dependencies:
|
|
|
24
24
|
- - "~>"
|
|
25
25
|
- !ruby/object:Gem::Version
|
|
26
26
|
version: '0.1'
|
|
27
|
+
- !ruby/object:Gem::Dependency
|
|
28
|
+
name: thor
|
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
|
30
|
+
requirements:
|
|
31
|
+
- - "~>"
|
|
32
|
+
- !ruby/object:Gem::Version
|
|
33
|
+
version: '1.3'
|
|
34
|
+
type: :runtime
|
|
35
|
+
prerelease: false
|
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
37
|
+
requirements:
|
|
38
|
+
- - "~>"
|
|
39
|
+
- !ruby/object:Gem::Version
|
|
40
|
+
version: '1.3'
|
|
27
41
|
- !ruby/object:Gem::Dependency
|
|
28
42
|
name: rake
|
|
29
43
|
requirement: !ruby/object:Gem::Requirement
|
|
@@ -82,7 +96,16 @@ files:
|
|
|
82
96
|
- README.md
|
|
83
97
|
- bin/sumo-query
|
|
84
98
|
- lib/sumologic.rb
|
|
99
|
+
- lib/sumologic/cli.rb
|
|
85
100
|
- lib/sumologic/client.rb
|
|
101
|
+
- lib/sumologic/configuration.rb
|
|
102
|
+
- lib/sumologic/http/authenticator.rb
|
|
103
|
+
- lib/sumologic/http/client.rb
|
|
104
|
+
- lib/sumologic/metadata/collector.rb
|
|
105
|
+
- lib/sumologic/metadata/source.rb
|
|
106
|
+
- lib/sumologic/search/job.rb
|
|
107
|
+
- lib/sumologic/search/paginator.rb
|
|
108
|
+
- lib/sumologic/search/poller.rb
|
|
86
109
|
- lib/sumologic/version.rb
|
|
87
110
|
homepage: https://github.com/patrick204nqh/sumologic-query
|
|
88
111
|
licenses:
|