fluent-plugin-oceanbase-logs 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/lib/fluent/plugin/in_oceanbase_logs.rb +371 -0
- data/lib/fluent/plugin/oceanbase/logs/version.rb +9 -0
- metadata +130 -0
checksums.yaml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
---
|
|
2
|
+
SHA256:
|
|
3
|
+
metadata.gz: 3a3230feaa77c86a454f2733f9097f05a2e57f43dd418c1fed7af21d5345a875
|
|
4
|
+
data.tar.gz: d2a7f628ddb81778b10c6b9a14ffc1d6fa1fe6cbc4b1e7771da21840a42ef93e
|
|
5
|
+
SHA512:
|
|
6
|
+
metadata.gz: f2f8cbbf267cdf26e929677364f9a0db2cfdc1256fa231bbdf85f347cd301d8fdbd26c1b219d2faed9a94cacfd177e5aad665f291f9aafc809ab8406f4e25d32
|
|
7
|
+
data.tar.gz: 9f1725a4250c1988960db905493ce797d66b096a6f586b3af6c3642956b3c5fb7194516d82c828f5c728b086dfe0fbc61f01ea39927a22dcf7742acad6c81a0c
|
|
@@ -0,0 +1,371 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'net/http'
require 'uri'
require 'json'
require 'openssl'
require 'time'
require 'digest'
require 'securerandom'
require 'fluent/plugin/input'

module Fluent::Plugin
  # Input plugin that periodically polls the OceanBase Cloud OpenAPI for
  # slow-SQL / top-SQL diagnostics of one tenant and emits each returned
  # record as a Fluentd event. Requests authenticate via HTTP Digest using
  # the AccessKey ID/Secret pair (see RFC 7616).
  class OceanBaseLogsInput < Input
    Fluent::Plugin.register_input('oceanbase_logs', self)

    helpers :thread, :storage

    DEFAULT_STORAGE_TYPE = 'local'

    # Maps the user-facing :log_type value to the API path segment.
    LOG_TYPE_PATHS = {
      'slow_sql' => 'slowSql',
      'top_sql' => 'topSql',
    }.freeze

    config_param :log_type, :enum, list: LOG_TYPE_PATHS.keys.map(&:to_sym), default: :slow_sql,
                 desc: "Type of SQL diagnostics to collect: slow_sql or top_sql."
    config_param :fetch_samples, :bool, default: false,
                 desc: "When true, fetch per-execution sample details for each SQL (each execution = one event)."

    config_param :access_key_id, :string, secret: true,
                 desc: "OceanBase Cloud AccessKey ID."
    config_param :access_key_secret, :string, secret: true,
                 desc: "OceanBase Cloud AccessKey Secret."

    config_param :instance_id, :string,
                 desc: "OceanBase cluster instance ID."
    config_param :tenant_id, :string,
                 desc: "OceanBase tenant ID."
    config_param :project_id, :string, default: nil,
                 desc: "OceanBase Cloud project ID (X-Ob-Project-Id header)."

    config_param :db_name, :string, default: nil,
                 desc: "Filter by database name."
    config_param :search_keyword, :string, default: nil,
                 desc: "Search keyword for SQL text."
    config_param :node_ip, :string, default: nil,
                 desc: "Filter by database node IP."
    config_param :filter_condition, :string, default: nil,
                 desc: "Advanced filter (e.g. '@avgCpuTime > 20 and @executions > 100')."
    config_param :sql_text_length, :integer, default: 65535,
                 desc: "Max length of SQL text returned."

    config_param :tag, :string,
                 desc: "Fluentd tag for emitted events."
    config_param :fetch_interval, :time, default: 300,
                 desc: "Seconds between each API poll (default 5 min)."
    config_param :lookback_seconds, :integer, default: 600,
                 desc: "How far back each query window looks (default 10 min)."
    config_param :endpoint, :string, default: 'api-cloud-cn.oceanbase.com',
                 desc: "API endpoint."
    config_param :http_proxy, :string, default: nil,
                 desc: "HTTP proxy URL."
    config_param :ssl_verify_peer, :bool, default: true,
                 desc: "Verify SSL certificates."

    config_param :deduplicate, :bool, default: true,
                 desc: "Enable deduplication."
    config_param :include_metadata, :bool, default: true,
                 desc: "Attach instance_id / tenant_id / log_type to each record."

    config_section :storage do
      config_set_default :usage, 'seen_traces'
      config_set_default :@type, DEFAULT_STORAGE_TYPE
      config_set_default :persistent, false
    end

    # Validates/normalizes configuration and sets up the dedup storage.
    # NOTE(review): the seen-keys storage is never pruned, so with
    # deduplication enabled it grows without bound over the plugin's
    # lifetime — consider a TTL/compaction pass in a future revision.
    def configure(conf)
      super
      @endpoint = @endpoint.to_s.strip
      @endpoint = 'api-cloud-cn.oceanbase.com' if @endpoint.empty?
      @api_path_segment = LOG_TYPE_PATHS[@log_type.to_s]
      if @deduplicate
        @seen_storage = storage_create(
          usage: 'seen_traces',
          conf: config,
          default_type: DEFAULT_STORAGE_TYPE
        )
      end
    end

    # Starts the background polling thread.
    def start
      super
      @finished = false
      thread_create(:in_oceanbase_logs_runner, &method(:run))
    end

    # Signals the poll loop to stop; the loop checks @finished at most
    # once per second, so shutdown latency is ~1s.
    def shutdown
      @finished = true
      super
    end

    private

    # Main poll loop: fetch, emit, sleep, repeat until shutdown.
    # Any error from one cycle is logged and the loop continues.
    def run
      until @finished
        begin
          fetch_and_emit
        rescue => e
          log.error "Failed to fetch OceanBase #{@log_type} data",
                    error: e.message, error_class: e.class.to_s
          log.debug_backtrace(e.backtrace)
        end
        sleep_interruptible(@fetch_interval)
      end
    end

    # Sleeps for +seconds+ total, waking at least once per second to honor
    # shutdown promptly.
    #
    # FIX: the previous implementation used `seconds.to_i.times`, so a
    # fractional interval below 1s truncated to zero iterations and the
    # poll loop busy-spun with no sleep at all. A monotonic-clock deadline
    # also keeps the total wait accurate for fractional intervals.
    def sleep_interruptible(seconds)
      deadline = Process.clock_gettime(Process::CLOCK_MONOTONIC) + seconds
      until @finished
        remaining = deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
        break if remaining <= 0
        sleep([remaining, 1].min)
      end
    end

    # Computes the [lookback, now] UTC window and dispatches to the
    # configured collection mode.
    def fetch_and_emit
      now = Time.now.utc
      start_time = (now - @lookback_seconds).strftime('%Y-%m-%dT%H:%M:%SZ')
      end_time = now.strftime('%Y-%m-%dT%H:%M:%SZ')

      if @fetch_samples
        fetch_and_emit_samples(start_time, end_time)
      else
        fetch_and_emit_list(start_time, end_time)
      end
    end

    # ---- Mode 1: emit aggregated list (one record per sqlId) ----
    #
    # NOTE(review): the dedup key includes start_time, and with the default
    # settings (lookback 600s > interval 300s) every poll has a different
    # start_time — so the same sqlId is re-emitted each cycle despite
    # deduplication. Confirm whether that is intentional window-overlap
    # behavior before tightening the key.
    def fetch_and_emit_list(start_time, end_time)
      response = call_list_api(start_time, end_time)
      return unless response

      records = extract_records(response)
      return if records.nil? || records.empty?

      es = Fluent::MultiEventStream.new

      records.each do |record|
        sql_id = record['sqlId']
        next if sql_id.nil? || sql_id.empty?

        if @deduplicate
          # FIX: use a plain string key instead of a dynamically built
          # symbol — avoids per-record symbol allocation and matches the
          # storage helper's conventional key type.
          dedup_key = "list_#{sql_id}_#{start_time}"
          next if @seen_storage.get(dedup_key)
          @seen_storage.put(dedup_key, Time.now.to_i.to_s)
        end

        record = attach_metadata(record, start_time, end_time) if @include_metadata
        es.add(Fluent::EventTime.now, record)
      end

      router.emit_stream(@tag, es) unless es.empty?
      log.info "Emitted #{es.size} #{@log_type} events (#{start_time} ~ #{end_time})" if es.size > 0
    end

    # ---- Mode 2: emit per-execution samples (one record per trace) ----
    # First fetches the aggregated list to discover sqlIds in the window,
    # then fetches each sqlId's execution samples.
    def fetch_and_emit_samples(start_time, end_time)
      list_response = call_list_api(start_time, end_time)
      return unless list_response

      sql_records = extract_records(list_response)
      return if sql_records.nil? || sql_records.empty?

      sql_ids = sql_records.map { |r| r['sqlId'] }.compact.uniq
      log.debug "Found #{sql_ids.size} unique SQL IDs, fetching samples..."

      total_emitted = 0

      sql_ids.each do |sql_id|
        samples = fetch_samples_for_sql(sql_id, start_time, end_time)
        next if samples.nil? || samples.empty?

        es = Fluent::MultiEventStream.new

        samples.each do |sample|
          trace_id = sample['traceId']
          # Fall back to sqlId+requestTime when the API omits traceId.
          dedup_id = trace_id || "#{sql_id}_#{sample['requestTime']}"

          if @deduplicate
            # FIX: string key instead of dynamic symbol (see list mode).
            dedup_key = "trace_#{dedup_id}"
            next if @seen_storage.get(dedup_key)
            @seen_storage.put(dedup_key, Time.now.to_i.to_s)
          end

          sample = attach_metadata(sample, start_time, end_time) if @include_metadata

          # Prefer the execution's own timestamp; fall back to "now" when
          # absent or unparseable.
          event_time = if sample['requestTime']
                         begin
                           Fluent::EventTime.from_time(Time.parse(sample['requestTime']))
                         rescue
                           Fluent::EventTime.now
                         end
                       else
                         Fluent::EventTime.now
                       end

          es.add(event_time, sample)
        end

        unless es.empty?
          router.emit_stream(@tag, es)
          total_emitted += es.size
        end
      end

      log.info "Emitted #{total_emitted} #{@log_type} sample events (#{start_time} ~ #{end_time})" if total_emitted > 0
    end

    # Fetches per-execution sample records for one sqlId.
    # Returns an Array of record Hashes, or nil on API/parse failure.
    def fetch_samples_for_sql(sql_id, start_time, end_time)
      path = "/api/v2/instances/#{@instance_id}/tenants/#{@tenant_id}/sqls/#{sql_id}/samples"
      params = {
        'startTime' => start_time,
        'endTime' => end_time,
      }
      params['dbName'] = @db_name if @db_name

      response = call_api_raw(path, params)
      return nil unless response
      extract_records(response)
    end

    # Returns a copy of +record+ with plugin-level context fields merged in.
    def attach_metadata(record, start_time, end_time)
      record.merge(
        'ob_instance_id' => @instance_id,
        'ob_tenant_id' => @tenant_id,
        'ob_log_type' => @log_type.to_s,
        'query_start_time' => start_time,
        'query_end_time' => end_time
      )
    end

    # Normalizes the several response envelopes observed from the API
    # into a flat Array of record Hashes; returns nil (with a warning)
    # on an unrecognized shape.
    def extract_records(response)
      if response['data'].is_a?(Hash) && response['data']['dataList'].is_a?(Array)
        response['data']['dataList']
      elsif response['Data'].is_a?(Array)
        response['Data']
      elsif response['data'].is_a?(Array)
        response['data']
      else
        log.warn "Unexpected API response structure", keys: response.keys
        nil
      end
    end

    # ---- API calls ----

    # Calls the aggregated slow/top SQL list endpoint for the window,
    # applying all configured filters.
    def call_list_api(start_time, end_time)
      path = "/api/v2/instances/#{@instance_id}/tenants/#{@tenant_id}/#{@api_path_segment}"
      params = {
        'startTime' => start_time,
        'endTime' => end_time,
      }
      params['dbName'] = @db_name if @db_name
      params['searchKeyWord'] = @search_keyword if @search_keyword
      params['nodeIp'] = @node_ip if @node_ip
      params['filterCondition'] = @filter_condition if @filter_condition
      params['sqlTextLength'] = @sql_text_length.to_s

      call_api_raw(path, params)
    end

    # Performs one authenticated GET and returns the parsed JSON body, or
    # nil on any HTTP / API-level / parse failure (each is logged).
    def call_api_raw(path, params)
      query = params.map { |k, v| "#{URI.encode_www_form_component(k)}=#{URI.encode_www_form_component(v)}" }.join('&')
      uri = URI("https://#{@endpoint}#{path}?#{query}")

      http = build_http(uri)
      resp = nil

      http.start do |session|
        resp = request_with_digest_auth(session, uri)
      end

      unless resp && resp.code.to_i == 200
        log.error "OceanBase API HTTP #{resp&.code}", body: resp&.body, path: path
        return nil
      end

      body = JSON.parse(resp.body)
      # The API signals logical failures inside a 200 response.
      unless body['success'] == true
        log.error "OceanBase API error",
                  code: body['errorCode'], message: body['errorMessage'], path: path
        return nil
      end
      body
    rescue JSON::ParserError => e
      log.error "Failed to parse API response", error: e.message, path: path
      nil
    end

    # ---- HTTP Digest Auth ----

    # Issues the request, and on a 401 Digest challenge retries once with
    # an Authorization header built from the challenge. Non-Digest 401s
    # are returned to the caller unchanged.
    def request_with_digest_auth(session, uri)
      req = Net::HTTP::Get.new(uri)
      req['X-Ob-Project-Id'] = @project_id if @project_id

      initial_resp = session.request(req)
      return initial_resp unless initial_resp.code.to_i == 401

      auth_header = initial_resp['www-authenticate']
      return initial_resp unless auth_header && auth_header.start_with?('Digest')

      digest = build_digest_header(auth_header, uri, 'GET')
      retry_req = Net::HTTP::Get.new(uri)
      retry_req['X-Ob-Project-Id'] = @project_id if @project_id
      retry_req['Authorization'] = digest

      session.request(retry_req)
    end

    # Builds the Digest Authorization header value (MD5, RFC 7616) from a
    # WWW-Authenticate challenge.
    def build_digest_header(www_auth, uri, method)
      params = parse_digest_challenge(www_auth)
      realm = params['realm']
      nonce = params['nonce']
      qop = params['qop']
      opaque = params['opaque']

      # FIX: the challenge may advertise a qop *list* (e.g. "auth,auth-int").
      # The previous code interpolated the raw list into both the response
      # hash and the qop= directive, producing an invalid header. Per
      # RFC 7616 the client must choose one supported value — pick "auth"
      # when offered, otherwise take the first advertised option.
      if qop
        offered = qop.split(',').map(&:strip)
        qop = offered.include?('auth') ? 'auth' : offered.first
      end

      nc = '00000001'
      cnonce = SecureRandom.hex(8)

      ha1 = md5("#{@access_key_id}:#{realm}:#{@access_key_secret}")
      ha2 = md5("#{method}:#{uri.request_uri}")

      if qop
        response = md5("#{ha1}:#{nonce}:#{nc}:#{cnonce}:#{qop}:#{ha2}")
      else
        response = md5("#{ha1}:#{nonce}:#{ha2}")
      end

      header = %Q(Digest username="#{@access_key_id}", realm="#{realm}", nonce="#{nonce}", uri="#{uri.request_uri}", response="#{response}")
      header += %Q(, qop=#{qop}, nc=#{nc}, cnonce="#{cnonce}") if qop
      header += %Q(, opaque="#{opaque}") if opaque
      header
    end

    # Parses a Digest challenge into a Hash: quoted values first, then
    # unquoted (token) values without overwriting quoted ones.
    def parse_digest_challenge(header)
      params = {}
      header.sub(/^Digest\s+/, '').scan(/(\w+)="([^"]*)"/) do |key, value|
        params[key] = value
      end
      header.sub(/^Digest\s+/, '').scan(/(\w+)=([^",\s]+)/) do |key, value|
        params[key] ||= value
      end
      params
    end

    # Hex MD5 digest (Digest auth uses MD5 here by protocol choice, not
    # for security-sensitive hashing).
    def md5(str)
      Digest::MD5.hexdigest(str)
    end

    # Builds an HTTPS client for +uri+, honoring the optional proxy and
    # the ssl_verify_peer setting, with fixed open/read timeouts.
    def build_http(uri)
      if @http_proxy
        proxy = URI(@http_proxy)
        http = Net::HTTP.new(uri.host, uri.port,
                             proxy.host, proxy.port, proxy.user, proxy.password)
      else
        http = Net::HTTP.new(uri.host, uri.port)
      end
      http.use_ssl = true
      http.verify_mode = @ssl_verify_peer ? OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE
      http.open_timeout = 30
      http.read_timeout = 60
      http
    end
  end
end
|
metadata
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: fluent-plugin-oceanbase-logs
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
version: 0.1.0
|
|
5
|
+
platform: ruby
|
|
6
|
+
authors:
|
|
7
|
+
- OceanBase Integrations
|
|
8
|
+
autorequire:
|
|
9
|
+
bindir: bin
|
|
10
|
+
cert_chain: []
|
|
11
|
+
date: 2026-03-06 00:00:00.000000000 Z
|
|
12
|
+
dependencies:
|
|
13
|
+
- !ruby/object:Gem::Dependency
|
|
14
|
+
name: fluentd
|
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
|
16
|
+
requirements:
|
|
17
|
+
- - ">="
|
|
18
|
+
- !ruby/object:Gem::Version
|
|
19
|
+
version: 1.8.0
|
|
20
|
+
type: :runtime
|
|
21
|
+
prerelease: false
|
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
23
|
+
requirements:
|
|
24
|
+
- - ">="
|
|
25
|
+
- !ruby/object:Gem::Version
|
|
26
|
+
version: 1.8.0
|
|
27
|
+
- !ruby/object:Gem::Dependency
|
|
28
|
+
name: bundler
|
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
|
30
|
+
requirements:
|
|
31
|
+
- - ">="
|
|
32
|
+
- !ruby/object:Gem::Version
|
|
33
|
+
version: '0'
|
|
34
|
+
type: :development
|
|
35
|
+
prerelease: false
|
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
37
|
+
requirements:
|
|
38
|
+
- - ">="
|
|
39
|
+
- !ruby/object:Gem::Version
|
|
40
|
+
version: '0'
|
|
41
|
+
- !ruby/object:Gem::Dependency
|
|
42
|
+
name: rake
|
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
|
44
|
+
requirements:
|
|
45
|
+
- - ">="
|
|
46
|
+
- !ruby/object:Gem::Version
|
|
47
|
+
version: '0'
|
|
48
|
+
type: :development
|
|
49
|
+
prerelease: false
|
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
51
|
+
requirements:
|
|
52
|
+
- - ">="
|
|
53
|
+
- !ruby/object:Gem::Version
|
|
54
|
+
version: '0'
|
|
55
|
+
- !ruby/object:Gem::Dependency
|
|
56
|
+
name: test-unit
|
|
57
|
+
requirement: !ruby/object:Gem::Requirement
|
|
58
|
+
requirements:
|
|
59
|
+
- - "~>"
|
|
60
|
+
- !ruby/object:Gem::Version
|
|
61
|
+
version: '3.0'
|
|
62
|
+
type: :development
|
|
63
|
+
prerelease: false
|
|
64
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
65
|
+
requirements:
|
|
66
|
+
- - "~>"
|
|
67
|
+
- !ruby/object:Gem::Version
|
|
68
|
+
version: '3.0'
|
|
69
|
+
- !ruby/object:Gem::Dependency
|
|
70
|
+
name: test-unit-rr
|
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
|
72
|
+
requirements:
|
|
73
|
+
- - ">="
|
|
74
|
+
- !ruby/object:Gem::Version
|
|
75
|
+
version: '0'
|
|
76
|
+
type: :development
|
|
77
|
+
prerelease: false
|
|
78
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
79
|
+
requirements:
|
|
80
|
+
- - ">="
|
|
81
|
+
- !ruby/object:Gem::Version
|
|
82
|
+
version: '0'
|
|
83
|
+
- !ruby/object:Gem::Dependency
|
|
84
|
+
name: webmock
|
|
85
|
+
requirement: !ruby/object:Gem::Requirement
|
|
86
|
+
requirements:
|
|
87
|
+
- - "~>"
|
|
88
|
+
- !ruby/object:Gem::Version
|
|
89
|
+
version: '3.0'
|
|
90
|
+
type: :development
|
|
91
|
+
prerelease: false
|
|
92
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
93
|
+
requirements:
|
|
94
|
+
- - "~>"
|
|
95
|
+
- !ruby/object:Gem::Version
|
|
96
|
+
version: '3.0'
|
|
97
|
+
description: Periodically fetches slow SQL information from OceanBase Cloud via the
|
|
98
|
+
DescribeOasSlowSQLList API and emits them as Fluentd events.
|
|
99
|
+
email:
|
|
100
|
+
- integrations@example.com
|
|
101
|
+
executables: []
|
|
102
|
+
extensions: []
|
|
103
|
+
extra_rdoc_files: []
|
|
104
|
+
files:
|
|
105
|
+
- lib/fluent/plugin/in_oceanbase_logs.rb
|
|
106
|
+
- lib/fluent/plugin/oceanbase/logs/version.rb
|
|
107
|
+
homepage: https://github.com/your-org/fluent-plugin-oceanbase-logs
|
|
108
|
+
licenses:
|
|
109
|
+
- MIT
|
|
110
|
+
metadata: {}
|
|
111
|
+
post_install_message:
|
|
112
|
+
rdoc_options: []
|
|
113
|
+
require_paths:
|
|
114
|
+
- lib
|
|
115
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
116
|
+
requirements:
|
|
117
|
+
- - ">="
|
|
118
|
+
- !ruby/object:Gem::Version
|
|
119
|
+
version: '2.4'
|
|
120
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
121
|
+
requirements:
|
|
122
|
+
- - ">="
|
|
123
|
+
- !ruby/object:Gem::Version
|
|
124
|
+
version: '0'
|
|
125
|
+
requirements: []
|
|
126
|
+
rubygems_version: 3.3.5
|
|
127
|
+
signing_key:
|
|
128
|
+
specification_version: 4
|
|
129
|
+
summary: Fluentd input plugin for OceanBase Cloud Logs
|
|
130
|
+
test_files: []
|