logseal 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +174 -0
- data/lib/logseal/client.rb +186 -0
- data/lib/logseal/error.rb +30 -0
- data/lib/logseal/resources/events.rb +77 -0
- data/lib/logseal/resources/exports.rb +40 -0
- data/lib/logseal/resources/organizations.rb +28 -0
- data/lib/logseal/resources/schemas.rb +35 -0
- data/lib/logseal/resources/viewer_tokens.rb +18 -0
- data/lib/logseal/resources/webhooks.rb +36 -0
- data/lib/logseal/version.rb +5 -0
- data/lib/logseal.rb +22 -0
- metadata +135 -0
checksums.yaml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
---
|
|
2
|
+
SHA256:
|
|
3
|
+
metadata.gz: f5a6004e9884c8794c64dfb5071e98d30d113dbadc7569e3acc38b8ea4985de5
|
|
4
|
+
data.tar.gz: 865c789da10735a16729ba40e7d761d245663b3e9c6c8202e31dfc0b0211d133
|
|
5
|
+
SHA512:
|
|
6
|
+
metadata.gz: 4b1dad1ae610793fc31e690554d37c8fa9e1c55b7e8ecc74aaca8c646edaca3a3ef870f312ebacae72199184f7f7aeb606f0acb1f753894b33e451eb938d8943
|
|
7
|
+
data.tar.gz: 01350f3dc8c5f72df2f7adbae0e6af54604010dde63a73ecd30b14b3ea38b21d02e9427c1693ddcbb67a41e9774aaf7aef38b494f8400d9c292b0eb6b302a66a
|
data/README.md
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
# logseal
|
|
2
|
+
|
|
3
|
+
Official Ruby SDK for [LogSeal](https://logseal.io) — Audit logging for B2B SaaS.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
Add to your Gemfile:
|
|
8
|
+
|
|
9
|
+
```ruby
|
|
10
|
+
gem "logseal"
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
Or install directly:
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
gem install logseal
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
## Quick Start
|
|
20
|
+
|
|
21
|
+
```ruby
|
|
22
|
+
require "logseal"
|
|
23
|
+
|
|
24
|
+
client = LogSeal.new(api_key: "sk_test_...")
|
|
25
|
+
|
|
26
|
+
# Emit an event (batched, non-blocking)
|
|
27
|
+
client.emit(
|
|
28
|
+
action: "document.published",
|
|
29
|
+
organization_id: "org_acme",
|
|
30
|
+
actor: { id: "user_123", name: "Jane Smith", email: "jane@acme.com" },
|
|
31
|
+
targets: [{ type: "document", id: "doc_456", name: "Q3 Report" }],
|
|
32
|
+
metadata: { previous_status: "draft" }
|
|
33
|
+
)
|
|
34
|
+
|
|
35
|
+
# Emit and wait for confirmation
|
|
36
|
+
event = client.emit_sync(
|
|
37
|
+
action: "user.deleted",
|
|
38
|
+
organization_id: "org_acme",
|
|
39
|
+
actor: { id: "admin_1" },
|
|
40
|
+
targets: [{ type: "user", id: "user_123" }]
|
|
41
|
+
)
|
|
42
|
+
puts "Event ID: #{event['id']}"
|
|
43
|
+
|
|
44
|
+
# Graceful shutdown (flushes remaining events)
|
|
45
|
+
at_exit { client.shutdown }
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
## Configuration
|
|
49
|
+
|
|
50
|
+
```ruby
|
|
51
|
+
client = LogSeal.new(
|
|
52
|
+
api_key: "sk_live_...", # Required
|
|
53
|
+
base_url: "https://api.logseal.io", # Optional override
|
|
54
|
+
batch_size: 100, # Events to buffer before auto-flushing
|
|
55
|
+
flush_interval: 5, # Seconds between automatic flushes
|
|
56
|
+
max_retries: 3 # Retry attempts on 429 / 5xx responses
|
|
57
|
+
)
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
## Querying Events
|
|
61
|
+
|
|
62
|
+
```ruby
|
|
63
|
+
# Paginated list
|
|
64
|
+
page = client.events.list(
|
|
65
|
+
organization_id: "org_acme",
|
|
66
|
+
action: "document.published",
|
|
67
|
+
limit: 50
|
|
68
|
+
)
|
|
69
|
+
|
|
70
|
+
page["data"].each do |event|
|
|
71
|
+
puts "#{event['action']} by #{event.dig('actor', 'name')}"
|
|
72
|
+
end
|
|
73
|
+
|
|
74
|
+
# Auto-paginate through all results
|
|
75
|
+
client.events.list_all(organization_id: "org_acme") do |event|
|
|
76
|
+
puts event["action"]
|
|
77
|
+
end
|
|
78
|
+
|
|
79
|
+
# Or use as an Enumerator
|
|
80
|
+
client.events.list_all(organization_id: "org_acme").each_with_index do |event, i|
|
|
81
|
+
break if i >= 1000
|
|
82
|
+
puts event["action"]
|
|
83
|
+
end
|
|
84
|
+
```
|
|
85
|
+
|
|
86
|
+
## Organizations
|
|
87
|
+
|
|
88
|
+
```ruby
|
|
89
|
+
orgs = client.organizations.list
|
|
90
|
+
org = client.organizations.create(external_id: "acme", name: "Acme Corp")
|
|
91
|
+
org = client.organizations.get("org_123")
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
## Event Schemas
|
|
95
|
+
|
|
96
|
+
```ruby
|
|
97
|
+
schemas = client.schemas.list
|
|
98
|
+
schema = client.schemas.create(
|
|
99
|
+
action: "document.updated",
|
|
100
|
+
description: "Fired when a document is modified",
|
|
101
|
+
target_types: ["document"]
|
|
102
|
+
)
|
|
103
|
+
schema = client.schemas.update("sch_123", description: "Updated desc")
|
|
104
|
+
client.schemas.delete("sch_123")
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
## Viewer Tokens
|
|
108
|
+
|
|
109
|
+
```ruby
|
|
110
|
+
token = client.viewer_tokens.create(organization_id: "org_acme", expires_in: 3600)
|
|
111
|
+
# Pass token["token"] to your frontend for the embeddable viewer
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
## Webhooks
|
|
115
|
+
|
|
116
|
+
```ruby
|
|
117
|
+
webhooks = client.webhooks.list
|
|
118
|
+
webhook = client.webhooks.create(url: "https://example.com/webhooks/logseal", events: ["*"])
|
|
119
|
+
puts "Secret: #{webhook['secret']}" # Only returned on creation
|
|
120
|
+
|
|
121
|
+
client.webhooks.update("whk_123", enabled: false)
|
|
122
|
+
client.webhooks.delete("whk_123")
|
|
123
|
+
```
|
|
124
|
+
|
|
125
|
+
## Exports
|
|
126
|
+
|
|
127
|
+
```ruby
|
|
128
|
+
export = client.exports.create(
|
|
129
|
+
organization_id: "org_acme",
|
|
130
|
+
format: "csv",
|
|
131
|
+
filters: { after: "2024-01-01" }
|
|
132
|
+
)
|
|
133
|
+
|
|
134
|
+
# Poll until complete
|
|
135
|
+
completed = client.exports.poll(export["id"], timeout: 120)
|
|
136
|
+
puts "Download: #{completed['download_url']}"
|
|
137
|
+
```
|
|
138
|
+
|
|
139
|
+
## Verification
|
|
140
|
+
|
|
141
|
+
```ruby
|
|
142
|
+
result = client.events.verify(organization_id: "org_acme")
|
|
143
|
+
puts result["status"] # "valid", "broken", or "tampered"
|
|
144
|
+
```
|
|
145
|
+
|
|
146
|
+
## Error Handling
|
|
147
|
+
|
|
148
|
+
```ruby
|
|
149
|
+
begin
|
|
150
|
+
client.emit_sync(action: "test", organization_id: "o", actor: { id: "u" })
|
|
151
|
+
rescue LogSeal::Error => e
|
|
152
|
+
puts "[#{e.type}] #{e.code}: #{e.message}"
|
|
153
|
+
puts "HTTP status: #{e.status_code}"
|
|
154
|
+
end
|
|
155
|
+
```
|
|
156
|
+
|
|
157
|
+
All API errors are raised as `LogSeal::Error`:
|
|
158
|
+
|
|
159
|
+
| Method | Description |
|
|
160
|
+
|--------|-------------|
|
|
161
|
+
| `type` | Error category (`authentication_error`, `validation_error`, etc.) |
|
|
162
|
+
| `code` | Machine-readable code (`invalid_api_key`, `missing_required_field`, etc.) |
|
|
163
|
+
| `message` | Human-readable description |
|
|
164
|
+
| `param` | Request parameter that caused the error (if applicable) |
|
|
165
|
+
| `status_code` | HTTP status code |
|
|
166
|
+
|
|
167
|
+
## Requirements
|
|
168
|
+
|
|
169
|
+
- Ruby 3.0+
|
|
170
|
+
- Faraday 2.x
|
|
171
|
+
|
|
172
|
+
## License
|
|
173
|
+
|
|
174
|
+
MIT
|
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "faraday"
|
|
4
|
+
require "faraday/retry"
|
|
5
|
+
require "json"
|
|
6
|
+
require "concurrent-ruby" if defined?(Concurrent)
|
|
7
|
+
|
|
8
|
+
module LogSeal
  # Main client for the LogSeal audit-logging API.
  #
  #   client = LogSeal::Client.new(api_key: "sk_test_...")
  #   client.emit(action: "user.login", organization_id: "org_acme", actor: { id: "user_1" })
  #   client.shutdown
  #
  # Events queued via #emit are delivered in batches by a background thread;
  # call #shutdown (e.g. in an at_exit hook) to flush anything still queued.
  class Client
    DEFAULT_BASE_URL = "https://api.logseal.io"
    DEFAULT_BATCH_SIZE = 100
    DEFAULT_FLUSH_INTERVAL = 5 # seconds
    DEFAULT_MAX_RETRIES = 3

    attr_reader :events, :organizations, :schemas, :viewer_tokens, :webhooks, :exports

    # @param api_key [String] Your LogSeal API key.
    # @param base_url [String] Override the API base URL.
    # @param batch_size [Integer] Events to buffer before auto-flushing.
    # @param flush_interval [Integer] Seconds between automatic flushes.
    # @param max_retries [Integer] Retry attempts on 429 / 5xx responses.
    # @raise [ArgumentError] if api_key is nil or empty.
    def initialize(api_key:, base_url: DEFAULT_BASE_URL, batch_size: DEFAULT_BATCH_SIZE,
                   flush_interval: DEFAULT_FLUSH_INTERVAL, max_retries: DEFAULT_MAX_RETRIES)
      raise ArgumentError, "api_key is required" if api_key.nil? || api_key.empty?

      @api_key = api_key
      @base_url = base_url
      @batch_size = batch_size
      @flush_interval = flush_interval
      @queue = []
      @mutex = Mutex.new # guards all access to @queue

      @conn = Faraday.new(url: @base_url) do |f|
        f.request :retry, max: max_retries, interval: 1, backoff_factor: 2,
                          retry_statuses: [429, 500, 502, 503, 504]
        f.headers["Authorization"] = "Bearer #{@api_key}"
        f.headers["Content-Type"] = "application/json"
        f.headers["User-Agent"] = "logseal-ruby/#{VERSION}"
      end

      @events = Resources::Events.new(self)
      @organizations = Resources::Organizations.new(self)
      @schemas = Resources::Schemas.new(self)
      @viewer_tokens = Resources::ViewerTokens.new(self)
      @webhooks = Resources::Webhooks.new(self)
      @exports = Resources::Exports.new(self)

      start_flush_thread
    end

    # Queue an event for batched delivery (non-blocking).
    #
    #   client.emit(
    #     action: "document.published",
    #     organization_id: "org_acme",
    #     actor: { id: "user_123", name: "Jane Smith" },
    #     targets: [{ type: "document", id: "doc_456" }],
    #     metadata: { previous_status: "draft" }
    #   )
    #
    # @return [Hash] +{ status: "queued" }+
    # @raise [LogSeal::Error] if required fields are missing.
    def emit(action:, organization_id:, actor:, targets: nil, metadata: nil,
             context: nil, occurred_at: nil, idempotency_key: nil)
      event = { action:, organization_id:, actor:, targets:, metadata:,
                context:, occurred_at:, idempotency_key: }
      validate_event!(event)

      # Decide whether to flush inside the same critical section as the push.
      # Reading @queue.size outside the mutex raced with the background flush
      # thread, which could drain the queue between the push and the check.
      should_flush = @mutex.synchronize do
        @queue << event
        @queue.size >= @batch_size
      end
      flush if should_flush

      { status: "queued" }
    end

    # Emit a single event and wait for server confirmation.
    #
    # @return [Hash] The created event record.
    # @raise [LogSeal::Error] on validation failure or API error.
    def emit_sync(action:, organization_id:, actor:, **opts)
      event = { action:, organization_id:, actor:, **opts }
      validate_event!(event)
      request(:post, "/v1/events", body: format_event(event))
    end

    # Flush all queued events immediately.
    #
    # On delivery failure the batch is pushed back to the front of the queue
    # (preserving order) and the error is re-raised.
    #
    # @return [Integer] Number of events accepted.
    def flush
      batch = @mutex.synchronize do
        return 0 if @queue.empty? # non-local return: nothing to send

        events = @queue.dup
        @queue.clear
        events
      end

      body = { events: batch.map { |e| format_event(e) } }
      begin
        resp = request(:post, "/v1/events/batch", body:)
        resp["accepted"] || 0
      rescue StandardError
        @mutex.synchronize { @queue.unshift(*batch) }
        raise
      end
    end

    # Flush remaining events and stop the background thread.
    def shutdown
      @flush_thread&.kill
      @flush_thread = nil
      flush
    end

    # Perform an HTTP request and parse the JSON response.
    #
    # @api private
    # @raise [LogSeal::Error] for any non-2xx response.
    def request(method, path, body: nil, params: nil)
      response = @conn.run_request(method, path, body ? JSON.generate(body) : nil, nil) do |req|
        req.params.update(params) if params
      end

      data = response.body.empty? ? {} : JSON.parse(response.body)

      unless response.success?
        err = data["error"] || {}
        raise Error.new(
          type: err["type"] || "internal_error",
          code: err["code"] || "unknown",
          message: err["message"] || "Unknown error",
          param: err["param"],
          doc_url: err["doc_url"],
          status_code: response.status
        )
      end

      data
    end

    private

    # Start the background thread that flushes the queue every
    # @flush_interval seconds. Errors are swallowed here; they surface on
    # the next explicit #flush (the failed batch is re-queued by #flush).
    def start_flush_thread
      @flush_thread = Thread.new do
        loop do
          sleep @flush_interval
          flush
        rescue StandardError
          # Swallow — errors surface on next explicit flush
        end
      end
      @flush_thread.abort_on_exception = false
    end

    # Raise a validation Error unless action, actor[:id] and organization_id
    # are all present and non-empty.
    def validate_event!(event)
      if event[:action].nil? || event[:action].empty?
        raise Error.new(type: "validation_error", code: "missing_required_field",
                        message: "The 'action' field is required.", param: "action")
      end

      actor = event[:actor]
      if actor.nil? || actor[:id].nil? || actor[:id].to_s.empty?
        raise Error.new(type: "validation_error", code: "missing_required_field",
                        message: "The 'actor[:id]' field is required.", param: "actor.id")
      end

      if event[:organization_id].nil? || event[:organization_id].empty?
        raise Error.new(type: "validation_error", code: "missing_required_field",
                        message: "The 'organization_id' field is required.", param: "organization_id")
      end
    end

    # Build the wire payload for one event, dropping nil sub-fields from
    # actor, targets and context, and omitting optional keys entirely when
    # they were not supplied.
    def format_event(event)
      payload = {
        action: event[:action],
        organization_id: event[:organization_id],
        actor: event[:actor].compact,
      }
      payload[:targets] = event[:targets].map(&:compact) if event[:targets]
      payload[:metadata] = event[:metadata] if event[:metadata]
      if event[:context]
        payload[:context] = {
          ip_address: event[:context][:ip_address],
          user_agent: event[:context][:user_agent],
          request_id: event[:context][:request_id],
        }.compact
      end
      payload[:occurred_at] = event[:occurred_at] if event[:occurred_at]
      payload[:idempotency_key] = event[:idempotency_key] if event[:idempotency_key]
      payload
    end
  end
end
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module LogSeal
  # Raised when the LogSeal API returns an error response.
  #
  #   begin
  #     client.events.get("bad_id")
  #   rescue LogSeal::Error => e
  #     e.type        # => "not_found_error"
  #     e.code        # => "event_not_found"
  #     e.message     # => "Event not found"
  #     e.status_code # => 404
  #   end
  class Error < StandardError
    # Error category, machine-readable code, offending request parameter,
    # documentation link, and HTTP status of the failed response.
    attr_reader :type, :code, :param, :doc_url, :status_code

    def initialize(type:, code:, message:, param: nil, doc_url: nil, status_code: 400)
      @type = type
      @code = code
      @param = param
      @doc_url = doc_url
      @status_code = status_code
      super(message)
    end

    # Render as "[type] code: message" for logs and backtraces.
    def to_s
      format("[%s] %s: %s", type, code, super)
    end
  end
end
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module LogSeal
  module Resources
    # Read, paginate, and cryptographically verify audit events.
    class Events
      def initialize(client)
        @client = client
      end

      # List events with filtering and pagination.
      #
      #   page = client.events.list(organization_id: "org_acme", action: "user.login", limit: 50)
      #   page["data"].each { |e| puts e["action"] }
      def list(organization_id:, action: nil, action_prefix: nil, actor_id: nil,
               target_type: nil, target_id: nil, after: nil, before: nil,
               search: nil, limit: nil, cursor: nil)
        query = { organization_id: }.compact
        # Only include the optional filters that were actually supplied.
        { action:, action_prefix:, actor_id:, target_type:, target_id:,
          after:, before:, search:, limit:, cursor: }.each do |key, value|
          query[key] = value if value
        end

        @client.request(:get, "/v1/events", params: query)
      end

      # Retrieve a single event by ID.
      def get(event_id)
        @client.request(:get, "/v1/events/#{event_id}")
      end

      # Verify hash-chain integrity for an organization's log.
      def verify(organization_id:, after: nil, before: nil)
        payload = { organization_id: }
        { after:, before: }.each { |key, value| payload[key] = value if value }
        @client.request(:post, "/v1/events/verify", body: payload)
      end

      # Verify a specific sequence range.
      def verify_range(organization_id:, from_sequence:, to_sequence:)
        payload = { organization_id:, from_sequence:, to_sequence: }
        @client.request(:post, "/v1/events/verify-range", body: payload)
      end

      # Retrieve the Merkle proof for an event.
      def get_proof(event_id)
        @client.request(:get, "/v1/events/#{event_id}/proof")
      end

      # Auto-paginate through all matching events.
      #
      #   client.events.list_all(organization_id: "org_acme") do |event|
      #     puts "#{event['action']} by #{event.dig('actor', 'name')}"
      #   end
      #
      # @return [Enumerator] if no block is given.
      def list_all(**params, &block)
        return enum_for(:list_all, **params) unless block

        cursor = nil
        more = true
        while more
          page = list(**params, cursor:)
          page["data"].each(&block)
          cursor = page["next_cursor"]
          more = page["has_more"] && cursor
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module LogSeal
  module Resources
    # Create export jobs and wait for them to finish.
    class Exports
      # Statuses at which polling stops.
      TERMINAL_STATUSES = %w[completed failed].freeze

      def initialize(client)
        @client = client
      end

      # Start a new export job.
      def create(organization_id:, format:, filters: nil)
        payload = { organization_id:, format: }
        payload[:filters] = filters if filters
        @client.request(:post, "/v1/exports", body: payload)
      end

      # Check the status of an export job.
      def get(id)
        @client.request(:get, "/v1/exports/#{id}")
      end

      # Poll an export until it completes or fails.
      #
      # @param id [String] The export job ID.
      # @param interval [Numeric] Seconds between polls (default 1).
      # @param timeout [Numeric] Maximum seconds to wait (default 60).
      # @return [Hash] The completed or failed export.
      # @raise [LogSeal::Error] if the deadline passes before a terminal status.
      def poll(id, interval: 1, timeout: 60)
        deadline = Time.now + timeout
        loop do
          export = get(id)
          # Status is checked before the deadline, so a job that finishes on
          # the final poll is still returned rather than raising.
          return export if TERMINAL_STATUSES.include?(export["status"])

          if Time.now >= deadline
            raise Error.new(type: "internal_error", code: "export_timeout",
                            message: "Export did not complete within the timeout period.")
          end
          sleep interval
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module LogSeal
  module Resources
    # Manage the organizations (tenants) that events belong to.
    class Organizations
      def initialize(client)
        @client = client
      end

      # List all organizations.
      def list
        @client.request(:get, "/v1/organizations")
      end

      # Create a new organization.
      def create(external_id:, name: nil)
        payload = { external_id: }
        payload[:name] = name if name
        @client.request(:post, "/v1/organizations", body: payload)
      end

      # Retrieve an organization by ID.
      def get(id)
        @client.request(:get, "/v1/organizations/#{id}")
      end
    end
  end
end
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module LogSeal
  module Resources
    # Manage event schemas (action catalog entries).
    class Schemas
      def initialize(client)
        @client = client
      end

      # List all schemas.
      def list
        @client.request(:get, "/v1/schemas")
      end

      # Create a new schema for an action.
      def create(action:, description: nil, target_types: nil, metadata_schema: nil)
        payload = { action: }
        # Only send the optional attributes that were supplied.
        { description:, target_types:, metadata_schema: }.each do |key, value|
          payload[key] = value if value
        end
        @client.request(:post, "/v1/schemas", body: payload)
      end

      # Retrieve a schema by ID.
      def get(id)
        @client.request(:get, "/v1/schemas/#{id}")
      end

      # Update schema fields by ID.
      def update(id, **fields)
        @client.request(:patch, "/v1/schemas/#{id}", body: fields)
      end

      # Delete a schema by ID.
      def delete(id)
        @client.request(:delete, "/v1/schemas/#{id}")
      end
    end
  end
end
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module LogSeal
  module Resources
    # Issue short-lived tokens for the embeddable log viewer.
    class ViewerTokens
      def initialize(client)
        @client = client
      end

      # Create a short-lived viewer token for the embeddable log viewer.
      def create(organization_id:, expires_in: nil)
        payload = { organization_id: }
        payload[:expires_in] = expires_in if expires_in
        @client.request(:post, "/v1/viewer-tokens", body: payload)
      end
    end
  end
end
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module LogSeal
  module Resources
    # Manage webhook endpoints for event delivery.
    class Webhooks
      def initialize(client)
        @client = client
      end

      # List all webhooks.
      def list
        @client.request(:get, "/v1/webhooks")
      end

      # Create a new webhook. The signing +secret+ is only returned once.
      def create(url:, organization_id: nil, events: nil, enabled: nil)
        payload = { url: }
        { organization_id:, events: }.each { |key, value| payload[key] = value if value }
        # `enabled: false` is meaningful, so test against nil rather than truthiness.
        payload[:enabled] = enabled unless enabled.nil?
        @client.request(:post, "/v1/webhooks", body: payload)
      end

      # Retrieve a webhook by ID.
      def get(id)
        @client.request(:get, "/v1/webhooks/#{id}")
      end

      # Update webhook fields by ID.
      def update(id, **fields)
        @client.request(:patch, "/v1/webhooks/#{id}", body: fields)
      end

      # Delete a webhook by ID.
      def delete(id)
        @client.request(:delete, "/v1/webhooks/#{id}")
      end
    end
  end
end
|
data/lib/logseal.rb
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require_relative "logseal/version"
|
|
4
|
+
require_relative "logseal/error"
|
|
5
|
+
require_relative "logseal/client"
|
|
6
|
+
require_relative "logseal/resources/events"
|
|
7
|
+
require_relative "logseal/resources/organizations"
|
|
8
|
+
require_relative "logseal/resources/schemas"
|
|
9
|
+
require_relative "logseal/resources/viewer_tokens"
|
|
10
|
+
require_relative "logseal/resources/webhooks"
|
|
11
|
+
require_relative "logseal/resources/exports"
|
|
12
|
+
|
|
13
|
+
module LogSeal
  # Convenience constructor; forwards all keyword arguments to Client.new.
  #
  #   client = LogSeal.new(api_key: "sk_test_...")
  def self.new(**kwargs)
    Client.new(**kwargs)
  end
end
|
metadata
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: logseal
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
version: 0.1.0
|
|
5
|
+
platform: ruby
|
|
6
|
+
authors:
|
|
7
|
+
- LogSeal
|
|
8
|
+
autorequire:
|
|
9
|
+
bindir: bin
|
|
10
|
+
cert_chain: []
|
|
11
|
+
date: 2026-03-06 00:00:00.000000000 Z
|
|
12
|
+
dependencies:
|
|
13
|
+
- !ruby/object:Gem::Dependency
|
|
14
|
+
name: faraday
|
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
|
16
|
+
requirements:
|
|
17
|
+
- - ">="
|
|
18
|
+
- !ruby/object:Gem::Version
|
|
19
|
+
version: '2.0'
|
|
20
|
+
- - "<"
|
|
21
|
+
- !ruby/object:Gem::Version
|
|
22
|
+
version: '3.0'
|
|
23
|
+
type: :runtime
|
|
24
|
+
prerelease: false
|
|
25
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
26
|
+
requirements:
|
|
27
|
+
- - ">="
|
|
28
|
+
- !ruby/object:Gem::Version
|
|
29
|
+
version: '2.0'
|
|
30
|
+
- - "<"
|
|
31
|
+
- !ruby/object:Gem::Version
|
|
32
|
+
version: '3.0'
|
|
33
|
+
- !ruby/object:Gem::Dependency
|
|
34
|
+
name: faraday-retry
|
|
35
|
+
requirement: !ruby/object:Gem::Requirement
|
|
36
|
+
requirements:
|
|
37
|
+
- - ">="
|
|
38
|
+
- !ruby/object:Gem::Version
|
|
39
|
+
version: '2.0'
|
|
40
|
+
- - "<"
|
|
41
|
+
- !ruby/object:Gem::Version
|
|
42
|
+
version: '3.0'
|
|
43
|
+
type: :runtime
|
|
44
|
+
prerelease: false
|
|
45
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
46
|
+
requirements:
|
|
47
|
+
- - ">="
|
|
48
|
+
- !ruby/object:Gem::Version
|
|
49
|
+
version: '2.0'
|
|
50
|
+
- - "<"
|
|
51
|
+
- !ruby/object:Gem::Version
|
|
52
|
+
version: '3.0'
|
|
53
|
+
- !ruby/object:Gem::Dependency
|
|
54
|
+
name: rspec
|
|
55
|
+
requirement: !ruby/object:Gem::Requirement
|
|
56
|
+
requirements:
|
|
57
|
+
- - "~>"
|
|
58
|
+
- !ruby/object:Gem::Version
|
|
59
|
+
version: '3.12'
|
|
60
|
+
type: :development
|
|
61
|
+
prerelease: false
|
|
62
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
63
|
+
requirements:
|
|
64
|
+
- - "~>"
|
|
65
|
+
- !ruby/object:Gem::Version
|
|
66
|
+
version: '3.12'
|
|
67
|
+
- !ruby/object:Gem::Dependency
|
|
68
|
+
name: webmock
|
|
69
|
+
requirement: !ruby/object:Gem::Requirement
|
|
70
|
+
requirements:
|
|
71
|
+
- - "~>"
|
|
72
|
+
- !ruby/object:Gem::Version
|
|
73
|
+
version: '3.19'
|
|
74
|
+
type: :development
|
|
75
|
+
prerelease: false
|
|
76
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
77
|
+
requirements:
|
|
78
|
+
- - "~>"
|
|
79
|
+
- !ruby/object:Gem::Version
|
|
80
|
+
version: '3.19'
|
|
81
|
+
- !ruby/object:Gem::Dependency
|
|
82
|
+
name: rubocop
|
|
83
|
+
requirement: !ruby/object:Gem::Requirement
|
|
84
|
+
requirements:
|
|
85
|
+
- - "~>"
|
|
86
|
+
- !ruby/object:Gem::Version
|
|
87
|
+
version: '1.60'
|
|
88
|
+
type: :development
|
|
89
|
+
prerelease: false
|
|
90
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
91
|
+
requirements:
|
|
92
|
+
- - "~>"
|
|
93
|
+
- !ruby/object:Gem::Version
|
|
94
|
+
version: '1.60'
|
|
95
|
+
description: Audit logging for B2B SaaS — emit, query, verify, and export audit events.
|
|
96
|
+
email: support@logseal.io
|
|
97
|
+
executables: []
|
|
98
|
+
extensions: []
|
|
99
|
+
extra_rdoc_files: []
|
|
100
|
+
files:
|
|
101
|
+
- README.md
|
|
102
|
+
- lib/logseal.rb
|
|
103
|
+
- lib/logseal/client.rb
|
|
104
|
+
- lib/logseal/error.rb
|
|
105
|
+
- lib/logseal/resources/events.rb
|
|
106
|
+
- lib/logseal/resources/exports.rb
|
|
107
|
+
- lib/logseal/resources/organizations.rb
|
|
108
|
+
- lib/logseal/resources/schemas.rb
|
|
109
|
+
- lib/logseal/resources/viewer_tokens.rb
|
|
110
|
+
- lib/logseal/resources/webhooks.rb
|
|
111
|
+
- lib/logseal/version.rb
|
|
112
|
+
homepage: https://github.com/LogSeal/logseal-sdks/tree/main/packages/ruby
|
|
113
|
+
licenses:
|
|
114
|
+
- MIT
|
|
115
|
+
metadata: {}
|
|
116
|
+
post_install_message:
|
|
117
|
+
rdoc_options: []
|
|
118
|
+
require_paths:
|
|
119
|
+
- lib
|
|
120
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
121
|
+
requirements:
|
|
122
|
+
- - ">="
|
|
123
|
+
- !ruby/object:Gem::Version
|
|
124
|
+
version: '3.0'
|
|
125
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
126
|
+
requirements:
|
|
127
|
+
- - ">="
|
|
128
|
+
- !ruby/object:Gem::Version
|
|
129
|
+
version: '0'
|
|
130
|
+
requirements: []
|
|
131
|
+
rubygems_version: 3.4.1
|
|
132
|
+
signing_key:
|
|
133
|
+
specification_version: 4
|
|
134
|
+
summary: Official Ruby SDK for LogSeal
|
|
135
|
+
test_files: []
|