stretchy-model 0.6.6 → 0.7.0

Files changed (57)
  1. checksums.yaml +4 -4
  2. data/docs/_sidebar.md +2 -1
  3. data/docs/examples/_sidebar.md +1 -1
  4. data/docs/examples/neural_search_with_llm.md +381 -0
  5. data/docs/guides/_sidebar.md +2 -1
  6. data/lib/elasticsearch/api/actions/connector/check_in.rb +64 -0
  7. data/lib/elasticsearch/api/actions/connector/delete.rb +64 -0
  8. data/lib/elasticsearch/api/actions/connector/get.rb +64 -0
  9. data/lib/elasticsearch/api/actions/connector/last_sync.rb +66 -0
  10. data/lib/elasticsearch/api/actions/connector/list.rb +60 -0
  11. data/lib/elasticsearch/api/actions/connector/post.rb +57 -0
  12. data/lib/elasticsearch/api/actions/connector/put.rb +66 -0
  13. data/lib/elasticsearch/api/actions/connector/update_api_key_id.rb +66 -0
  14. data/lib/elasticsearch/api/actions/connector/update_configuration.rb +66 -0
  15. data/lib/elasticsearch/api/actions/connector/update_error.rb +66 -0
  16. data/lib/elasticsearch/api/actions/connector/update_filtering.rb +66 -0
  17. data/lib/elasticsearch/api/actions/connector/update_index_name.rb +66 -0
  18. data/lib/elasticsearch/api/actions/connector/update_name.rb +66 -0
  19. data/lib/elasticsearch/api/actions/connector/update_native.rb +66 -0
  20. data/lib/elasticsearch/api/actions/connector/update_pipeline.rb +66 -0
  21. data/lib/elasticsearch/api/actions/connector/update_scheduling.rb +66 -0
  22. data/lib/elasticsearch/api/actions/connector/update_service_type.rb +66 -0
  23. data/lib/elasticsearch/api/actions/connector/update_status.rb +66 -0
  24. data/lib/elasticsearch/api/namespace/connector.rb +36 -0
  25. data/lib/opensearch/api/actions/machine_learning/connector/delete.rb +42 -0
  26. data/lib/opensearch/api/actions/machine_learning/connector/get.rb +42 -0
  27. data/lib/opensearch/api/actions/machine_learning/connector/list.rb +38 -0
  28. data/lib/opensearch/api/actions/machine_learning/connector/post.rb +35 -0
  29. data/lib/opensearch/api/actions/machine_learning/connector/put.rb +44 -0
  30. data/lib/opensearch/api/actions/machine_learning/models/predict.rb +32 -0
  31. data/lib/opensearch/api/namespace/connector.rb +19 -0
  32. data/lib/stretchy/machine_learning/connector.rb +130 -0
  33. data/lib/stretchy/machine_learning/errors.rb +25 -0
  34. data/lib/stretchy/machine_learning/model.rb +162 -109
  35. data/lib/stretchy/machine_learning/registry.rb +19 -0
  36. data/lib/stretchy/open_search_compatibility.rb +2 -0
  37. data/lib/stretchy/pipelines/processor.rb +2 -0
  38. data/lib/stretchy/rails/railtie.rb +11 -0
  39. data/lib/stretchy/rails/tasks/connector/create.rake +32 -0
  40. data/lib/stretchy/rails/tasks/connector/delete.rake +27 -0
  41. data/lib/stretchy/rails/tasks/connector/status.rake +31 -0
  42. data/lib/stretchy/rails/tasks/connector/update.rake +32 -0
  43. data/lib/stretchy/rails/tasks/index/create.rake +28 -0
  44. data/lib/stretchy/rails/tasks/index/delete.rake +27 -0
  45. data/lib/stretchy/rails/tasks/index/status.rake +23 -0
  46. data/lib/stretchy/rails/tasks/ml/delete.rake +25 -0
  47. data/lib/stretchy/rails/tasks/ml/deploy.rake +78 -0
  48. data/lib/stretchy/rails/tasks/ml/status.rake +31 -0
  49. data/lib/stretchy/rails/tasks/pipeline/create.rake +27 -0
  50. data/lib/stretchy/rails/tasks/pipeline/delete.rake +26 -0
  51. data/lib/stretchy/rails/tasks/pipeline/status.rake +25 -0
  52. data/lib/stretchy/rails/tasks/status.rake +15 -0
  53. data/lib/stretchy/rails/tasks/stretchy.rake +42 -0
  54. data/lib/stretchy/version.rb +1 -1
  55. data/lib/stretchy.rb +7 -0
  56. metadata +62 -3
  57. data/docs/examples/semantic_search_with_llm.md +0 -83
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: ff8d7515eb6f795cf92989ba14d3dd404bb716378896ca2da4054bca98e467f4
-  data.tar.gz: 2361d39fcacb927f7d62bfe2105054c0923cd6b20af3cd7724c2bc9d784d42e5
+  metadata.gz: a741abc938cc22041b3aa0931f0a390b87262878402020fa095cbc47b35b6365
+  data.tar.gz: 87e565ac9cf56e341ff3541492d2ae32ba01a391ef745a74390789531e5a0cb9
 SHA512:
-  metadata.gz: 410eb69810533c4a12fe12b1bdd5ea6df9b320f9c1953a32f924487b02f1825296765e5a5451abf1c22491a6d9e8a1741e4e7ef01789acbac0026e1db423602d
-  data.tar.gz: 9afdb5b058a307c3b64f8043f1e4f2348973050b6b5c802726031f060a44ac11003633ba816ab278ba204c1a4946a12f0c0c7171fd72cf0188493c43b3e0d4df
+  metadata.gz: 72ac4716758a0b393f65cb928ecb7963b16f95c52865acba04f26b4356f5e0cb24d468d13d1f1f7e6aba6c5a8bc32c3db232796ab09c20136b174f9575934ae8
+  data.tar.gz: 577e03534c365c9746d3ddf1eba3263397752e5265d868268e78c1fc9851a490f31d953ace7ba7ab113d65493dc32b918720b75f5c13ad30a32d31f0597388eb
data/docs/_sidebar.md CHANGED
@@ -11,4 +11,5 @@

 * __Examples__
   * [Data Analysis](examples/data_analysis)
-  * [Simple Ingest Pipeline](examples/simple-ingest-pipeline)
+  * [Simple Ingest Pipeline](examples/simple-ingest-pipeline)
+  * [Neural Search with LLM](examples/neural_search_with_llm)
data/docs/examples/_sidebar.md CHANGED
@@ -12,4 +12,4 @@
 * __Examples__
   * [Data Analysis](examples/data_analysis)
   * [Simple Ingest Pipeline](examples/simple-ingest-pipeline?id=simple-ingest-pipeline)
-  * [Semantic Search with LLMs](examples/semantic_search_with_llm)
+  * [Neural Search with LLM](examples/neural_search_with_llm?id=neural-search-with-llm-expert)
data/docs/examples/neural_search_with_llm.md ADDED
@@ -0,0 +1,381 @@
# Neural Search with LLM Expert

This guide provides a comprehensive walkthrough of building a Retrieval-Augmented Generation (RAG) expert system with stretchy-model, OpenSearch, and OpenAI. By integrating these technologies, you'll develop an application capable of understanding and answering complex questions with context drawn from its own data.

The process involves several key steps:

- **Environment Setup:** Creating a Rails application and adding the necessary gems.
- **Configuration:** Setting credentials for OpenSearch and OpenAI.
- **Model and Pipeline Creation:** Defining models and pipelines for handling and indexing data.
- **Machine Learning Integration:** Leveraging OpenAI's GPT model to generate responses based on the retrieved context.
- **Deployment:** Deploying models and verifying the setup.
- **Ingestion:** Ingesting a git repository.
- **Interaction:** Asking questions about the data.

## Create Rails Application

```
rails new stretchy-rails-app
```

```
cd stretchy-rails-app
```

```
bundle add opensearch-ruby stretchy-model github-linguist
```

### Set Credentials

```
rails credentials:edit
```

```yaml
opensearch:
  host: https://localhost:9200
  user: admin
  password: admin
  transport_options:
    ssl:
      verify: false

openai:
  openAI_key: <open_ai_key>
```

### Initialize Stretchy

_config/initializers/stretchy.rb_
```ruby
Stretchy.configure do |config|
  config.client = OpenSearch::Client.new Rails.application.credentials.opensearch
end
```

### Start OpenSearch

>[!WARNING]
>This example deploys multiple Machine Learning models and will need a node for each. Follow the [multi-node docker instructions](https://gist.github.com/esmarkowski/7f3ec9bfb3b0dc3604112b67410067e7) to ensure you can fully deploy by the end of the example.

```
docker-compose -f opensearch/ml-compose.yml up
```

## Define Models

### RepoFile

*app/models/repo_file.rb*
```ruby
class RepoFile < StretchyModel
  attribute :content, :text
  attribute :file_name, :string
  attribute :embeddings, :knn_vector, dimension: 384

  default_pipeline :text_embedding_pipeline

  index_settings(
    'knn.space_type': :cosinesimil,
    knn: true
  )

end
```
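
These declarations correspond to a standard OpenSearch k-NN index. Below is a rough sketch of the index body implied by the attributes and `index_settings` above — the exact mapping stretchy-model generates may differ (for instance, how `:string` is mapped), so treat it as an illustration only.

```ruby
# Sketch only: the approximate OpenSearch index body implied by RepoFile.
index_body = {
  settings: {
    index: {
      knn: true,                       # enable k-NN search on this index
      'knn.space_type': 'cosinesimil'  # cosine similarity between vectors
    }
  },
  mappings: {
    properties: {
      content:    { type: 'text' },
      file_name:  { type: 'keyword' },                   # assumption: :string maps to keyword
      embeddings: { type: 'knn_vector', dimension: 384 } # matches `dimension: 384` above
    }
  }
}
```
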
### Text Embedding Model

*app/machine_learning/text_embedding_model.rb*
```ruby
class TextEmbeddingModel < Stretchy::MachineLearning::Model

  model :sentence_transformers_minilm_12
  model_format 'TORCH_SCRIPT'
  version '1.0.1'

end
```

The `sentence_transformers_minilm_12` shortcut presumably resolves to OpenSearch's pretrained `huggingface/sentence-transformers/all-MiniLM-L12-v2` model, whose 384-dimensional embeddings match the `dimension: 384` declared on `RepoFile`.

## Ingest Pipeline

### Text Embedding Pipeline

*app/pipelines/ingest/text_embedding_pipeline.rb*
```ruby
module Ingest
  class TextEmbeddingPipeline < Stretchy::Pipeline

    description "KNN text embedding pipeline"

    processor :text_embedding,
      field_map: {
        content: :embeddings
      },
      model: TextEmbeddingModel

  end
end
```
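
For reference, OpenSearch's neural-search plugin implements this as a `text_embedding` ingest processor. A rough sketch of the pipeline body this class is assumed to produce (the model id is only available once `TextEmbeddingModel` has been registered and deployed):

```ruby
# Sketch only: approximately what ends up at PUT _ingest/pipeline/text_embedding_pipeline.
pipeline_body = {
  description: 'KNN text embedding pipeline',
  processors: [
    {
      text_embedding: {
        model_id: TextEmbeddingModel.model_id,  # resolved after the model is deployed
        field_map: { content: 'embeddings' }    # embed `content` into the `embeddings` field
      }
    }
  ]
}
```
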
## Configure LLM

### Create a connector

```ruby
module Connectors
  class GPTConnector < Stretchy::MachineLearning::Connector

    description "The connector to OpenAI's gpt-3.5-turbo service for gpt model"

    version 1

    protocol "http"

    credentials Rails.application.credentials.dig(:openai)

    parameters endpoint: "api.openai.com",
      model: 'gpt-3.5-turbo'

    actions action_type: "predict",
      method: "POST",
      url: "https://${parameters.endpoint}/v1/chat/completions",
      headers: {
        "Authorization": "Bearer ${credential.openAI_key}"
      },
      request_body: "{\"model\":\"${parameters.model}\",\"messages\": ${parameters.messages}}"

  end
end
```
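
This DSL follows the shape of an ML Commons connector blueprint for OpenAI chat completions. A rough sketch of the connector definition it is assumed to send to OpenSearch (`POST _plugins/_ml/connectors/_create`) — the `name` and exact payload are illustrative:

```ruby
# Sketch only: the approximate ML Commons connector payload implied by GPTConnector.
# The ${...} placeholders are expanded by OpenSearch at request time, not by Ruby.
connector_payload = {
  name: 'Connectors::GPTConnector',   # assumption: derived from the class name
  description: "The connector to OpenAI's gpt-3.5-turbo service for gpt model",
  version: 1,
  protocol: 'http',
  parameters: { endpoint: 'api.openai.com', model: 'gpt-3.5-turbo' },
  credential: Rails.application.credentials.dig(:openai),  # { openAI_key: ... }
  actions: [
    {
      action_type: 'predict',
      method: 'POST',
      url: 'https://${parameters.endpoint}/v1/chat/completions',
      headers: { 'Authorization' => 'Bearer ${credential.openAI_key}' },
      request_body: '{"model":"${parameters.model}","messages": ${parameters.messages}}'
    }
  ]
}
```
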
### Create an LLM Model

```ruby
module Models
  class GPT < Stretchy::MachineLearning::Model

    model_name 'gpt'
    function_name :remote
    connector 'Connectors::GPTConnector'

    def self.predict(prompt)
      response = client.predict(model_id: self.model_id, body: prompt)
      response.dig('inference_results')
        .first.dig('output')
        .first.dig('dataAsMap', 'choices')
        .first.dig('message', 'content')
    end
  end
end
```
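
Once the model has been registered and deployed (see *Deploy Models* below), you can exercise the connector directly from a Rails console. The prompt hash below mirrors the shape the `Expert` class builds later:

```ruby
# Quick sanity check after deployment: call the remote model directly.
prompt = {
  parameters: {
    messages: [
      { role: 'user', content: 'Say hello in one short sentence.' }
    ]
  }
}

Models::GPT.predict(prompt)
# => the assistant's reply, extracted from choices[0].message.content
```
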
### Define an expert

```ruby
class Expert

  BEHAVIOR = "You are an expert in Ruby, Ruby on Rails, Elasticsearch and OpenSearch. You read documentation and provide succinct and direct answers to the questions provided using the context provided. If the answer is not directly shown in the context, you will analyze the data and find the answer. If you don't know the answer, just say you don't know."

  def get_context(question, k = 2)
    RepoFile.neural(
      embeddings: question,
      model_id: TextEmbeddingModel.model_id,
      k: k
    ).pluck(:content)
  end

  def ask(question)
    prompt = {
      parameters: {
        messages: [
          {
            "role": "system",
            "content": BEHAVIOR
          },
          {
            "role": "assistant",
            "content": get_context(question).join("\n")
          },
          {
            "role": "user",
            "content": question
          }
        ]
      }
    }

    Models::GPT.predict(prompt)
  end
end
```

The `parameters.messages` key in the prompt matches the `${parameters.messages}` placeholder in the connector's `request_body`, so the messages array is passed through to OpenAI's chat completions API unchanged.

## Deploy Models

Stretchy includes rake tasks to help manage your resources. Running `rake stretchy:up` ensures dependencies are handled in order by performing the following steps:

- Create `Connectors`
- Register and deploy `MachineLearning::Models`
- Create `Pipelines`
- Create indexes for all `StretchyModels`

> [!INFO|style:flat|label:Machine Learning Nodes]
> If you do not have dedicated machine learning nodes (or are running a single-node cluster) you'll need to enable machine learning on all nodes.
>
> ```
> rake stretchy:ml_on_all_nodes
> ```

Run the following to start the deployment:

```
rake stretchy:up
```

![[docs/media/stretchy_up.mov]]

>[!TIP]
>Registering and deploying machine learning models can take some time.
>Once it's complete you can confirm the status with `rake stretchy:status`.
>
> If you'd like to explore the other rake tasks available, run `rake -T | grep stretchy`.

## Create a Source

We'll create a simple source class that clones a git repository and lets us index its files into `RepoFile`.

*app/models/sources/git.rb*
```ruby
require 'open3'

module Sources
  class Git

    attr_reader :repo_url, :repo_name, :path
    attr_accessor :errors

    def initialize(repo_url, path: '/tmp')
      @repo_url = repo_url
      @repo_name = extract_repo_name(repo_url)
      @path = path
      @errors = []
    end

    # Clone the repo (if needed), then ingest it.
    # Returns the array of values produced by the block, one entry per file.
    def perform(&block)
      clone unless File.directory?("#{path}/#{repo_name}")
      ingest(&block)
    end

    # Clone the repo to path
    def clone
      clone_cmd = "git clone #{repo_url} #{path}/#{repo_name}"
      Open3.popen2e(clone_cmd) do |stdin, stdout_err, wait_thr|
        while line = stdout_err.gets
          Rails.logger.debug line
        end
        exit_status = wait_thr.value
        unless exit_status.success?
          raise "FAILED to clone #{repo_url}"
        end
      end
    end

    # Remove the repo from the path
    def remove
      FileUtils.rm_rf("#{path}/#{repo_name}")
    end

    # Recursively crawl each file in the repo and yield it to the block,
    # collecting the block's return values so the caller can index them.
    def ingest(&block)
      @errors = []
      Dir.glob("#{path}/#{repo_name}/**/*").map do |file|
        next unless File.file?(file)

        begin
          yield file, self if block_given?
        rescue => e
          errors << [file, e]
          nil
        end
      end
    end

    def errors?
      errors.any?
    end

    private

    def extract_repo_name(url)
      url.split('/').last.gsub('.git', '')
    end

  end
end
```

## Ingest Data

Start a Rails console with `rails console` and run the following code.

This will check out the repo and bulk-index the file contents into `RepoFile`, automatically using the `TextEmbeddingPipeline` to create embeddings during ingest.

```ruby
source = Sources::Git.new('https://github.com/theablefew/stretchy.git')

repo_files = source.perform do |file, instance|
  next unless ['.md', '.rb'].include?(File.extname(file))

  RepoFile.new(
    content: File.read(file),
    file_name: File.basename(file)
  )
end.compact

RepoFile.bulk_in_batches(repo_files, size: 100) do |batch|
  batch.map! { |record| record.to_bulk }
end
```

> [!TIP|label:Test it out]
>
> We can now perform semantic searches using `neural` search.
>
> ```ruby
> RepoFile.neural(
>   embeddings: "How do I perform a percentile rank aggregation?",
>   model_id: TextEmbeddingModel.model_id,
>   k: 2
> )
> ```
> This is how our `Expert` gathers relevant context to pass to the `GPT` LLM.

## Interact

```ruby
expert = Expert.new
expert.ask("How can I perform a percentile ranks aggregation?")
```

We've asked how to perform a percentile ranks aggregation, and our `Expert` has supplied the relevant documentation as context via a `neural` search.

> [!INFO|label:LLM Response]
> You can perform a percentile ranks aggregation in Stretchy by using the `percentile_ranks` method. Here is an example to calculate the percentile ranks for values `[1, 2, 3]` on the field `'field_name'`:
>
> ```ruby
> Model.percentile_ranks(:my_agg, {field: 'field_name', values: [1, 2, 3]})
> ```
>
> This method will calculate the percentile ranks for the specified values on the specified field.

## Conclusion

By following this guide, you've created a powerful expert system capable of providing informed responses to complex questions. This integration not only demonstrates the potential of combining semantic search with generative AI but also provides a solid foundation for further exploration and development.
data/docs/guides/_sidebar.md CHANGED
@@ -11,4 +11,5 @@

 * __Examples__
   * [Data Analysis](examples/data_analysis)
-  * [Simple Ingest Pipeline](examples/simple-ingest-pipeline)
+  * [Simple Ingest Pipeline](examples/simple-ingest-pipeline)
+  * [Neural Search with LLM](examples/neural_search_with_llm)
data/lib/elasticsearch/api/actions/connector/check_in.rb ADDED
@@ -0,0 +1,64 @@
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Auto generated from build hash f284cc16f4d4b4289bc679aa1529bb504190fe80
# @see https://github.com/elastic/elasticsearch/tree/main/rest-api-spec
#
module Elasticsearch
  module API
    module Connector
      module Actions
        # Updates the last_seen timestamp in the connector document.
        # This functionality is Experimental and may be changed or removed
        # completely in a future release. Elastic will take a best effort approach
        # to fix any issues, but experimental features are not subject to the
        # support SLA of official GA features.
        #
        # @option arguments [String] :connector_id The unique identifier of the connector to be updated.
        # @option arguments [Hash] :headers Custom HTTP headers
        #
        # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/check-in-connector-api.html
        #
        def check_in(arguments = {})
          request_opts = { endpoint: arguments[:endpoint] || 'connector.check_in' }

          defined_params = [:connector_id].each_with_object({}) do |variable, set_variables|
            set_variables[variable] = arguments[variable] if arguments.key?(variable)
          end
          request_opts[:defined_params] = defined_params unless defined_params.empty?

          raise ArgumentError, "Required argument 'connector_id' missing" unless arguments[:connector_id]

          arguments = arguments.clone
          headers = arguments.delete(:headers) || {}

          body = nil

          _connector_id = arguments.delete(:connector_id)

          method = Elasticsearch::API::HTTP_PUT
          path = "_connector/#{Utils.__listify(_connector_id)}/_check_in"
          params = {}

          Elasticsearch::API::Response.new(
            perform_request(method, path, params, body, headers, request_opts)
          )
        end
      end
    end
  end
end
data/lib/elasticsearch/api/actions/connector/delete.rb ADDED
@@ -0,0 +1,64 @@
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Auto generated from build hash f284cc16f4d4b4289bc679aa1529bb504190fe80
# @see https://github.com/elastic/elasticsearch/tree/main/rest-api-spec
#
module Elasticsearch
  module API
    module Connector
      module Actions
        # Deletes a connector.
        # This functionality is Experimental and may be changed or removed
        # completely in a future release. Elastic will take a best effort approach
        # to fix any issues, but experimental features are not subject to the
        # support SLA of official GA features.
        #
        # @option arguments [String] :connector_id The unique identifier of the connector to be deleted.
        # @option arguments [Hash] :headers Custom HTTP headers
        #
        # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-connector-api.html
        #
        def delete(arguments = {})
          request_opts = { endpoint: arguments[:endpoint] || 'connector.delete' }

          defined_params = [:connector_id].each_with_object({}) do |variable, set_variables|
            set_variables[variable] = arguments[variable] if arguments.key?(variable)
          end
          request_opts[:defined_params] = defined_params unless defined_params.empty?

          raise ArgumentError, "Required argument 'connector_id' missing" unless arguments[:connector_id]

          arguments = arguments.clone
          headers = arguments.delete(:headers) || {}

          body = nil

          _connector_id = arguments.delete(:connector_id)

          method = Elasticsearch::API::HTTP_DELETE
          path = "_connector/#{Utils.__listify(_connector_id)}"
          params = {}

          Elasticsearch::API::Response.new(
            perform_request(method, path, params, body, headers, request_opts)
          )
        end
      end
    end
  end
end
data/lib/elasticsearch/api/actions/connector/get.rb ADDED
@@ -0,0 +1,64 @@
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Auto generated from build hash f284cc16f4d4b4289bc679aa1529bb504190fe80
# @see https://github.com/elastic/elasticsearch/tree/main/rest-api-spec
#
module Elasticsearch
  module API
    module Connector
      module Actions
        # Returns the details about a connector.
        # This functionality is Experimental and may be changed or removed
        # completely in a future release. Elastic will take a best effort approach
        # to fix any issues, but experimental features are not subject to the
        # support SLA of official GA features.
        #
        # @option arguments [String] :connector_id The unique identifier of the connector to be returned.
        # @option arguments [Hash] :headers Custom HTTP headers
        #
        # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/get-connector-api.html
        #
        def get(arguments = {})
          request_opts = { endpoint: arguments[:endpoint] || 'connector.get' }

          defined_params = [:connector_id].each_with_object({}) do |variable, set_variables|
            set_variables[variable] = arguments[variable] if arguments.key?(variable)
          end
          request_opts[:defined_params] = defined_params unless defined_params.empty?

          raise ArgumentError, "Required argument 'connector_id' missing" unless arguments[:connector_id]

          arguments = arguments.clone
          headers = arguments.delete(:headers) || {}

          body = nil

          _connector_id = arguments.delete(:connector_id)

          method = Elasticsearch::API::HTTP_GET
          path = "_connector/#{Utils.__listify(_connector_id)}"
          params = {}

          Elasticsearch::API::Response.new(
            perform_request(method, path, params, body, headers, request_opts)
          )
        end
      end
    end
  end
end
data/lib/elasticsearch/api/actions/connector/last_sync.rb ADDED
@@ -0,0 +1,66 @@
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Auto generated from build hash f284cc16f4d4b4289bc679aa1529bb504190fe80
# @see https://github.com/elastic/elasticsearch/tree/main/rest-api-spec
#
module Elasticsearch
  module API
    module Connector
      module Actions
        # Updates the stats of last sync in the connector document.
        # This functionality is Experimental and may be changed or removed
        # completely in a future release. Elastic will take a best effort approach
        # to fix any issues, but experimental features are not subject to the
        # support SLA of official GA features.
        #
        # @option arguments [String] :connector_id The unique identifier of the connector to be updated.
        # @option arguments [Hash] :headers Custom HTTP headers
        # @option arguments [Hash] :body Object with stats related to the last connector sync run. (*Required*)
        #
        # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/update-connector-last-sync-api.html
        #
        def last_sync(arguments = {})
          request_opts = { endpoint: arguments[:endpoint] || 'connector.last_sync' }

          defined_params = [:connector_id].each_with_object({}) do |variable, set_variables|
            set_variables[variable] = arguments[variable] if arguments.key?(variable)
          end
          request_opts[:defined_params] = defined_params unless defined_params.empty?

          raise ArgumentError, "Required argument 'body' missing" unless arguments[:body]
          raise ArgumentError, "Required argument 'connector_id' missing" unless arguments[:connector_id]

          arguments = arguments.clone
          headers = arguments.delete(:headers) || {}

          body = arguments.delete(:body)

          _connector_id = arguments.delete(:connector_id)

          method = Elasticsearch::API::HTTP_PUT
          path = "_connector/#{Utils.__listify(_connector_id)}/_last_sync"
          params = {}

          Elasticsearch::API::Response.new(
            perform_request(method, path, params, body, headers, request_opts)
          )
        end
      end
    end
  end
end