logstash-input-elasticsearch 4.23.0 → 5.0.0

This diff compares the contents of the two package versions as published to their public registries and is provided for informational purposes only.
@@ -1,38 +0,0 @@
- -----BEGIN CERTIFICATE-----
- MIIDIzCCAgugAwIBAgIBATANBgkqhkiG9w0BAQsFADA0MTIwMAYDVQQDEylFbGFz
- dGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTAeFw0yNDEyMjYy
- MjI3MTVaFw0yNTEyMjYyMjI3MTVaMA0xCzAJBgNVBAMTAmVzMIIBIjANBgkqhkiG
- 9w0BAQEFAAOCAQ8AMIIBCgKCAQEArZLZvLSWDK7Ul+AaBnjU81dsfaow8zOjCC5V
- V21nXpYzQJoQbuWcvGYxwL7ZDs2ca4Wc8BVCj1NDduHuP7U+QIlUdQpl8kh5a0Zz
- 36pcFw7UyF51/AzWixJrht/Azzkb5cpZtE22ZK0KhS4oCsjJmTN0EABAsGhDI9/c
- MjNrUC7iP0dvfOuzAPp7ufY83h98jKKXUYV24snbbvmqoWI6GQQNSG/sEo1+1UGH
- /z07/mVKoBAa5DVoNGvxN0fCE7vW7hkhT8+frJcsYFatAbnf6ql0KzEa8lN9u0gR
- hQNM3zcKKsjEMomBzVBc4SV3KXO0d/jGdDtlqsm2oXqlTMdtGwIDAQABo2cwZTAY
- BgNVHREEETAPgg1lbGFzdGljc2VhcmNoMAkGA1UdEwQCMAAwHQYDVR0OBBYEFFQU
- K+6Cg2kExRj1xSDzEi4kkgKXMB8GA1UdIwQYMBaAFMgkye5+2l+TE0I6RsXRHjGB
- wpBGMA0GCSqGSIb3DQEBCwUAA4IBAQB6cZ7IrDzcAoOZgAt9RlOe2yzQeH+alttp
- CSQVINjJotS1WvmtqjBB6ArqLpXIGU89TZsktNe/NQJzgYSaMnlIuHVLFdxJYmwU
- T1cP6VC/brmqP/dd5y7VWE7Lp+Wd5CxKl/WY+9chmgc+a1fW/lnPEJJ6pca1Bo8b
- byIL0yY2IUv4R2eh1IyQl9oGH1GOPLgO7cY04eajxYcOVA2eDSItoyDtrJfkFP/P
- UXtC1JAkvWKuujFEiBj0AannhroWlp3gvChhBwCuCAU0KXD6g8BE8tn6oT1+FW7J
- avSfHxAe+VHtYhF8sJ8jrdm0d7E4GKS9UR/pkLAL1JuRdJ1VkPx3
- -----END CERTIFICATE-----
- -----BEGIN CERTIFICATE-----
- MIIDFTCCAf2gAwIBAgIBATANBgkqhkiG9w0BAQsFADA0MTIwMAYDVQQDEylFbGFz
- dGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTAeFw0yNDEyMjYy
- MjI3MTVaFw0yNTEyMjYyMjI3MTVaMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlm
- aWNhdGUgVG9vbCBBdXRvZ2VuZXJhdGVkIENBMIIBIjANBgkqhkiG9w0BAQEFAAOC
- AQ8AMIIBCgKCAQEArUe66xG4Y2zO13gRC+rBwyvxe+c01pqV6ukw6isIbJIQWs1/
- QfEMhUwYwKs6/UXxK+VwardcA2zYwngXbGGEtms+mpUfH5CdJnrqW7lHz1BVK4yH
- 90IzGE0GU4D90OW/L4QkGX0fv3VQbL8KGFKBoF04pXIaSGMStFN4wirutHtQboYv
- 99X4kbLjVSIuubUpA/v9dUP1TNl8ar+HKUWRM96ijHkFTF3FR0NnZyt44gP5qC0h
- i4lUiR6Uo9D6WMFjeRYFF7GolCy/I1SzWBmmOnNhQLO5VxcNG4ldhBcapZeGwE98
- m/5lxLIwgFR9ZP8bXdxZTWLC58/LQ2NqOjA9mwIDAQABozIwMDAPBgNVHRMBAf8E
- BTADAQH/MB0GA1UdDgQWBBTIJMnuftpfkxNCOkbF0R4xgcKQRjANBgkqhkiG9w0B
- AQsFAAOCAQEAhfg/cmXc4Uh90yiXU8jOW8saQjTsq4ZMDQiLfJsNmNNYmHFN0vhv
- lJRI1STdy7+GpjS5QbrMjQIxWSS8X8xysE4Rt81IrWmLuao35TRFyoiE1seBQ5sz
- p/BxZUe57JvWi9dyzv2df4UfWFdGBhzdr80odZmz4i5VIv6qCKJKsGikcuLpepmp
- E/UKnKHeR/dFWsxzA9P2OzHTUNBMOOA2PyAUL49pwoChwJeOWN/zAgwMWLbuHFG0
- IN0u8swAmeH98QdvzbhiOatGNpqfTNvQEDc19yVjfXKpBVZQ79WtronYSqrbrUa1
- T2zD8bIVP7CdddD/UmpT1SSKh4PJxudy5Q==
- -----END CERTIFICATE-----
@@ -1,15 +0,0 @@
- #!/usr/bin/env bash
-
- set -e
- cd "$(dirname "$0")"
-
- openssl x509 -x509toreq -in ca.crt -copy_extensions copyall -signkey ca.key -out ca.csr
- openssl x509 -req -copy_extensions copyall -days 365 -in ca.csr -set_serial 0x01 -signkey ca.key -out ca.crt && rm ca.csr
- openssl x509 -in ca.crt -outform der | sha256sum | awk '{print $1}' > ca.der.sha256
-
- openssl x509 -x509toreq -in es.crt -copy_extensions copyall -signkey es.key -out es.csr
- openssl x509 -req -copy_extensions copyall -days 365 -in es.csr -set_serial 0x01 -CA ca.crt -CAkey ca.key -out es.crt && rm es.csr
- cat es.crt ca.crt > es.chain.crt
-
- # output ISO8601 timestamp to file
- date -Iseconds > GENERATED_AT
@@ -1,72 +0,0 @@
- # encoding: utf-8
- require "logstash/devutils/rspec/spec_helper"
- require "logstash/devutils/rspec/shared_examples"
- require "logstash/inputs/elasticsearch"
- require "logstash/inputs/elasticsearch/cursor_tracker"
-
- describe LogStash::Inputs::Elasticsearch::CursorTracker do
-
-   let(:last_run_metadata_path) { Tempfile.new('cursor_tracker_testing').path }
-   let(:tracking_field_seed) { "1980-01-01T23:59:59.999999999Z" }
-   let(:options) do
-     {
-       :last_run_metadata_path => last_run_metadata_path,
-       :tracking_field => "my_field",
-       :tracking_field_seed => tracking_field_seed
-     }
-   end
-
-   subject { described_class.new(**options) }
-
-   it "creating a class works" do
-     expect(subject).to be_a described_class
-   end
-
-   describe "checkpoint_cursor" do
-     before(:each) do
-       subject.checkpoint_cursor(intermediate: false) # store seed value
-       [
-         Thread.new(subject) {|subject| subject.record_last_value(LogStash::Event.new("my_field" => "2025-01-03T23:59:59.999999999Z")) },
-         Thread.new(subject) {|subject| subject.record_last_value(LogStash::Event.new("my_field" => "2025-01-01T23:59:59.999999999Z")) },
-         Thread.new(subject) {|subject| subject.record_last_value(LogStash::Event.new("my_field" => "2025-01-02T23:59:59.999999999Z")) },
-       ].each(&:join)
-     end
-     context "when doing intermediate checkpoint" do
-       it "persists the smallest value" do
-         subject.checkpoint_cursor(intermediate: true)
-         expect(IO.read(last_run_metadata_path)).to eq("2025-01-01T23:59:59.999999999Z")
-       end
-     end
-     context "when doing non-intermediate checkpoint" do
-       it "persists the largest value" do
-         subject.checkpoint_cursor(intermediate: false)
-         expect(IO.read(last_run_metadata_path)).to eq("2025-01-03T23:59:59.999999999Z")
-       end
-     end
-   end
-
-   describe "inject_cursor" do
-     let(:new_value) { "2025-01-03T23:59:59.999999999Z" }
-     let(:fake_now) { "2026-09-19T23:59:59.999999999Z" }
-
-     let(:query) do
-       %q[
-         { "query": { "range": { "event.ingested": { "gt": :last_value, "lt": :present}}}, "sort": [ { "event.ingested": {"order": "asc", "format": "strict_date_optional_time_nanos", "numeric_type" : "date_nanos" } } ] }
-       ]
-     end
-
-     before(:each) do
-       subject.record_last_value(LogStash::Event.new("my_field" => new_value))
-       subject.checkpoint_cursor(intermediate: false)
-       allow(subject).to receive(:now_minus_30s).and_return(fake_now)
-     end
-
-     it "injects the value of the cursor into json query if it contains :last_value" do
-       expect(subject.inject_cursor(query)).to match(/#{new_value}/)
-     end
-
-     it "injects current time into json query if it contains :present" do
-       expect(subject.inject_cursor(query)).to match(/#{fake_now}/)
-     end
-   end
- end
@@ -1,180 +0,0 @@
- # encoding: utf-8
- require "logstash/devutils/rspec/spec_helper"
- require "logstash/inputs/elasticsearch"
- require "elasticsearch"
-
- describe LogStash::Inputs::Elasticsearch::Esql do
-   let(:client) { instance_double(Elasticsearch::Client) }
-   let(:esql_client) { double("esql-client") }
-
-   let(:plugin) { instance_double(LogStash::Inputs::Elasticsearch, params: plugin_config, decorate_event: nil) }
-   let(:plugin_config) do
-     {
-       "query" => "FROM test-index | STATS count() BY field",
-       "retries" => 3
-     }
-   end
-   let(:esql_executor) { described_class.new(client, plugin) }
-
-   describe "#initialization" do
-     it "sets up the ESQL client with correct parameters" do
-       expect(esql_executor.instance_variable_get(:@query)).to eq(plugin_config["query"])
-       expect(esql_executor.instance_variable_get(:@retries)).to eq(plugin_config["retries"])
-       expect(esql_executor.instance_variable_get(:@target_field)).to eq(nil)
-     end
-   end
-
-   describe "#execution" do
-     let(:output_queue) { Queue.new }
-
-     context "when faces error while retrying" do
-       it "retries the given block the specified number of times" do
-         attempts = 0
-         result = esql_executor.retryable("Test Job") do
-           attempts += 1
-           raise StandardError if attempts < 3
-           "success"
-         end
-         expect(attempts).to eq(3)
-         expect(result).to eq("success")
-       end
-
-       it "returns false if the block fails all attempts" do
-         result = esql_executor.retryable("Test Job") do
-           raise StandardError
-         end
-         expect(result).to eq(false)
-       end
-     end
-
-     context "when executing chain of processes" do
-       let(:response) { { 'values' => [%w[foo bar]], 'columns' => [{ 'name' => 'a.b.1.d', 'type' => 'keyword' },
-                                                                   { 'name' => 'h_g.k$l.m.0', 'type' => 'keyword' }] } }
-
-       before do
-         allow(esql_executor).to receive(:retryable).and_yield
-         allow(client).to receive_message_chain(:esql, :query).and_return(response)
-       end
-
-       it "executes the ESQL query and processes the results" do
-         allow(response).to receive(:headers).and_return({})
-         esql_executor.do_run(output_queue, plugin_config["query"])
-         expect(output_queue.size).to eq(1)
-
-         event = output_queue.pop
-         expect(event.get('[a][b][1][d]')).to eq('foo')
-         expect(event.get('[h_g][k$l][m][0]')).to eq('bar')
-       end
-
-       it "logs a warning if the response contains a warning header" do
-         allow(response).to receive(:headers).and_return({ "warning" => "some warning" })
-         expect(esql_executor.logger).to receive(:warn).with("ES|QL executor received warning", { :warning_message => "some warning" })
-         esql_executor.do_run(output_queue, plugin_config["query"])
-       end
-
-       it "does not log a warning if the response does not contain a warning header" do
-         allow(response).to receive(:headers).and_return({})
-         expect(esql_executor.logger).not_to receive(:warn)
-         esql_executor.do_run(output_queue, plugin_config["query"])
-       end
-     end
-
-     describe "multiple rows in the result" do
-       let(:response) { { 'values' => rows, 'columns' => [{ 'name' => 'key.1', 'type' => 'keyword' },
-                                                          { 'name' => 'key.2', 'type' => 'keyword' }] } }
-
-       before do
-         allow(esql_executor).to receive(:retryable).and_yield
-         allow(client).to receive_message_chain(:esql, :query).and_return(response)
-         allow(response).to receive(:headers).and_return({})
-       end
-
-       context "when mapping" do
-         let(:rows) { [%w[foo bar], %w[hello world]] }
-
-         it "1:1 maps rows to events" do
-           esql_executor.do_run(output_queue, plugin_config["query"])
-           expect(output_queue.size).to eq(2)
-
-           event_1 = output_queue.pop
-           expect(event_1.get('[key][1]')).to eq('foo')
-           expect(event_1.get('[key][2]')).to eq('bar')
-
-           event_2 = output_queue.pop
-           expect(event_2.get('[key][1]')).to eq('hello')
-           expect(event_2.get('[key][2]')).to eq('world')
-         end
-       end
-
-       context "when partial nil values appear" do
-         let(:rows) { [[nil, "bar"], ["hello", nil]] }
-
-         it "ignores the nil values" do
-           esql_executor.do_run(output_queue, plugin_config["query"])
-           expect(output_queue.size).to eq(2)
-
-           event_1 = output_queue.pop
-           expect(event_1.get('[key][1]')).to eq(nil)
-           expect(event_1.get('[key][2]')).to eq('bar')
-
-           event_2 = output_queue.pop
-           expect(event_2.get('[key][1]')).to eq('hello')
-           expect(event_2.get('[key][2]')).to eq(nil)
-         end
-       end
-     end
-
-     context "when sub-elements occur in the result" do
-       let(:response) { {
-         'values' => [[50, 1, 100], [50, 0, 1000], [50, 9, 99999]],
-         'columns' =>
-           [
-             { 'name' => 'time', 'type' => 'long' },
-             { 'name' => 'time.min', 'type' => 'long' },
-             { 'name' => 'time.max', 'type' => 'long' },
-           ]
-       } }
-
-       before do
-         allow(esql_executor).to receive(:retryable).and_yield
-         allow(client).to receive_message_chain(:esql, :query).and_return(response)
-         allow(response).to receive(:headers).and_return({})
-       end
-
-       it "includes 1st depth elements into event" do
-         esql_executor.do_run(output_queue, plugin_config["query"])
-
-         expect(output_queue.size).to eq(3)
-         3.times do
-           event = output_queue.pop
-           expect(event.get('time')).to eq(50)
-           expect(event.get('[time][min]')).to eq(nil)
-           expect(event.get('[time][max]')).to eq(nil)
-         end
-       end
-     end
-   end
-
-   describe "#column spec" do
-     let(:valid_spec) { { 'name' => 'field.name', 'type' => 'keyword' } }
-     let(:column_spec) { LogStash::Inputs::Elasticsearch::ColumnSpec.new(valid_spec) }
-
-     context "when initializes" do
-       it "sets the name and type attributes" do
-         expect(column_spec.name).to eq("field.name")
-         expect(column_spec.type).to eq("keyword")
-       end
-
-       it "freezes the name and type attributes" do
-         expect(column_spec.name).to be_frozen
-         expect(column_spec.type).to be_frozen
-       end
-     end
-
-     context "when calls the field reference" do
-       it "returns the correct field reference format" do
-         expect(column_spec.field_reference).to eq("[field][name]")
-       end
-     end
-   end
- end if LOGSTASH_VERSION >= LogStash::Inputs::Elasticsearch::LS_ESQL_SUPPORT_VERSION
@@ -1,150 +0,0 @@
- # encoding: utf-8
- require "logstash/devutils/rspec/spec_helper"
- require "logstash/inputs/elasticsearch"
- require "elasticsearch"
- require_relative "../../../spec/es_helper"
-
- describe LogStash::Inputs::Elasticsearch, integration: true do
-
-   SECURE_INTEGRATION = ENV['SECURE_INTEGRATION'].eql? 'true'
-   ES_HOSTS = ["http#{SECURE_INTEGRATION ? 's' : nil}://#{ESHelper.get_host_port}"]
-
-   let(:plugin) { described_class.new(config) }
-   let(:es_index) { "logstash-esql-integration-#{rand(1000)}" }
-   let(:test_documents) do
-     [
-       { "message" => "test message 1", "type" => "a", "count" => 1 },
-       { "message" => "test message 2", "type" => "a", "count" => 2 },
-       { "message" => "test message 3", "type" => "b", "count" => 3 },
-       { "message" => "test message 4", "type" => "b", "count" => 4 },
-       { "message" => "test message 5", "type" => "c", "count" => 5 }
-     ]
-   end
-   let(:config) do
-     {
-       "hosts" => ES_HOSTS,
-       "query_type" => "esql"
-     }
-   end
-   let(:es_client) do
-     Elasticsearch::Client.new(hosts: ES_HOSTS)
-   end
-
-   before(:all) do
-     is_ls_with_esql_supported_client = Gem::Version.create(LOGSTASH_VERSION) >= Gem::Version.create(LogStash::Inputs::Elasticsearch::LS_ESQL_SUPPORT_VERSION)
-     skip "LS version does not have ES client which supports ES|QL" unless is_ls_with_esql_supported_client
-
-     # Skip tests if ES version doesn't support ES||QL
-     es_client = Elasticsearch::Client.new(hosts: ES_HOSTS) # need to separately create since let isn't allowed in before(:context)
-     es_version_info = es_client.info["version"]
-     es_gem_version = Gem::Version.create(es_version_info["number"])
-     skip "ES version does not support ES|QL" if es_gem_version.nil? || es_gem_version < Gem::Version.create(LogStash::Inputs::Elasticsearch::ES_ESQL_SUPPORT_VERSION)
-   end
-
-   before(:each) do
-     # Create index with test documents
-     es_client.indices.create(index: es_index, body: {}) unless es_client.indices.exists?(index: es_index)
-
-     test_documents.each do |doc|
-       es_client.index(index: es_index, body: doc, refresh: true)
-     end
-   end
-
-   after(:each) do
-     es_client.indices.delete(index: es_index) if es_client.indices.exists?(index: es_index)
-   end
-
-   context "#run ES|QL queries" do
-
-     before do
-       stub_const("LOGSTASH_VERSION", LogStash::Inputs::Elasticsearch::LS_ESQL_SUPPORT_VERSION)
-       allow_any_instance_of(LogStash::Inputs::Elasticsearch).to receive(:exit_plugin?).and_return false, true
-     end
-
-     before(:each) do
-       plugin.register
-     end
-
-     shared_examples "ESQL query execution" do |expected_count|
-       it "correctly retrieves documents" do
-         queue = Queue.new
-         plugin.run(queue)
-
-         event_count = 0
-         expected_count.times do |i|
-           event = queue.pop
-           expect(event).to be_a(LogStash::Event)
-           event_count += 1
-         end
-         expect(event_count).to eq(expected_count)
-       end
-     end
-
-     context "#FROM query" do
-       let(:config) do
-         super().merge("query" => "FROM #{es_index} | SORT count")
-       end
-
-       include_examples "ESQL query execution", 5
-     end
-
-     context "#FROM query and WHERE clause" do
-       let(:config) do
-         super().merge("query" => "FROM #{es_index} | WHERE type == \"a\" | SORT count")
-       end
-
-       include_examples "ESQL query execution", 2
-     end
-
-     context "#STATS aggregation" do
-       let(:config) do
-         super().merge("query" => "FROM #{es_index} | STATS avg(count) BY type")
-       end
-
-       it "retrieves aggregated stats" do
-         queue = Queue.new
-         plugin.run(queue)
-         results = []
-         3.times do
-           event = queue.pop
-           expect(event).to be_a(LogStash::Event)
-           results << event.get("avg(count)")
-         end
-
-         expected_averages = [1.5, 3.5, 5.0]
-         expect(results.sort).to eq(expected_averages)
-       end
-     end
-
-     context "#METADATA" do
-       let(:config) do
-         super().merge("query" => "FROM #{es_index} METADATA _index, _id, _version | DROP message.keyword, type.keyword | SORT count")
-       end
-
-       it "includes document metadata" do
-         queue = Queue.new
-         plugin.run(queue)
-
-         5.times do
-           event = queue.pop
-           expect(event).to be_a(LogStash::Event)
-           expect(event.get("_index")).not_to be_nil
-           expect(event.get("_id")).not_to be_nil
-           expect(event.get("_version")).not_to be_nil
-         end
-       end
-     end
-
-     context "#invalid ES|QL query" do
-       let(:config) do
-         super().merge("query" => "FROM undefined index | LIMIT 1")
-       end
-
-       it "doesn't produce events" do
-         queue = Queue.new
-         plugin.run(queue)
-         expect(queue.empty?).to eq(true)
-       end
-     end
-   end
- end