schema-tools 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,539 @@
1
+ require 'json'
2
+ require 'time'
3
+
4
module Seed
  # Word list for generating realistic text content
  WORD_LIST = %w[
    lorem ipsum dolor sit amet consectetur adipiscing elit sed do eiusmod tempor
    incididunt ut labore et dolore magna aliqua enim ad minim veniam quis nostrud
    exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat duis aute
    irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat
    nulla pariatur excepteur sint occaecat cupidatat non proident sunt in culpa
    qui officia deserunt mollit anim id est laborum search engine data ruby
    document index mapping schema elasticsearch opensearch cluster node shard
    replica primary secondary analysis tokenizer filter analyzer query filter
    aggregation pipeline script painless groovy mustache template kibana
    logstash beats metricbeat filebeat packetbeat heartbeat auditbeat
    functionbeat winlogbeat journalbeat apm agent apm server fleet agent
    policy enrollment token integration package endpoint security detection
    rule machine learning anomaly detection forecasting classification
    regression clustering outlier detection natural language processing
    vector search semantic search neural search transformer embedding
    vector database similarity search recommendation system personalization
    real-time streaming batch processing event sourcing cqrs microservices
    distributed system scalability performance optimization monitoring
    observability logging metrics tracing alerting notification dashboard
    visualization reporting analytics business intelligence data science
    machine learning artificial intelligence deep learning neural network
    algorithm model training inference prediction classification regression
    clustering dimensionality reduction feature engineering data preprocessing
    validation testing deployment production staging development environment
    configuration management version control continuous integration continuous
    deployment devops infrastructure as code containerization orchestration
    kubernetes docker swarm mesos nomad consul etcd zookeeper redis memcached
    rabbitmq kafka pulsar nats jetstream grpc rest api graphql websocket
    http https tls ssl certificate authentication authorization oauth jwt
    saml ldap active directory kerberos rbac abac policy enforcement
    compliance governance security audit vulnerability assessment penetration
    testing threat modeling risk management incident response disaster recovery
    backup restore high availability fault tolerance load balancing auto-scaling
    horizontal scaling vertical scaling sharding partitioning replication
    consistency eventual consistency strong consistency cap theorem acid
    base distributed consensus raft paxos byzantine fault tolerance
  ].freeze

  # Generate and bulk-index `num_docs` random documents into `index_name`.
  #
  # @param num_docs [Integer] total number of documents to create
  # @param mappings_json [Hash] index mappings hash ({'properties' => ...})
  # @param client [#bulk_index] client responding to bulk_index(docs, index)
  # @param index_name [String] target index or alias name
  # @raise [StandardError] when the cluster circuit breaker trips, or re-raises
  #   any other bulk-indexing error
  def self.seed_data(num_docs, mappings_json, client, index_name)
    puts "Seeding #{num_docs} documents to index: #{index_name}"

    # Parse the mappings to understand the schema
    schema = parse_mappings(mappings_json)
    puts "Parsed schema with #{schema.keys.length} top-level fields"

    # Small batches keep individual bulk requests modest and avoid tripping
    # the cluster circuit breaker with large documents.
    batch_size = 25
    total_batches = (num_docs.to_f / batch_size).ceil

    (1..total_batches).each do |batch_num|
      docs_in_batch = [batch_size, num_docs - (batch_num - 1) * batch_size].min
      puts "Generating batch #{batch_num}/#{total_batches} (#{docs_in_batch} documents)..."

      documents = generate_document_batch(docs_in_batch, schema)

      puts "Indexing batch #{batch_num}..."
      begin
        response = client.bulk_index(documents, index_name)
        # Partial per-document failures are reported but do not abort the run.
        report_bulk_errors(response, batch_num) if response['errors']
        puts "Successfully indexed batch #{batch_num}"
      rescue => e
        handle_bulk_failure(e, batch_num, batch_size)
      end

      # Add a small delay between batches to help with memory pressure
      sleep(0.1) if batch_num < total_batches
    end

    puts "Successfully seeded #{num_docs} documents to #{index_name}"
  end

  # Print a short summary of failed items from a bulk response.
  # BUGFIX: tolerates a missing 'items' array and items without an
  # 'index'/'status' entry (the original called `>= 400` on a possible nil).
  def self.report_bulk_errors(response, batch_num)
    error_items = (response['items'] || []).select { |item| item.dig('index', 'status').to_i >= 400 }
    error_count = error_items.length
    return if error_count.zero?

    puts "WARN: #{error_count} documents failed to index in batch #{batch_num}"

    # Print first few errors for debugging
    error_items.first(3).each_with_index do |item, index|
      error_info = item.dig('index', 'error')
      next unless error_info

      puts " Error #{index + 1}: #{error_info['type']} - #{error_info['reason']}"
      if error_info['caused_by']
        puts " Caused by: #{error_info['caused_by']['type']} - #{error_info['caused_by']['reason']}"
      end
    end

    puts " ... and #{error_count - 3} more errors" if error_count > 3
  end

  # Translate a bulk-indexing exception into actionable guidance.
  # Always re-raises so the caller observes the failure.
  def self.handle_bulk_failure(error, batch_num, batch_size)
    if error.message.include?('circuit_breaking_exception') || error.message.include?('HTTP 429')
      puts "ERROR: Circuit breaker triggered - OpenSearch cluster is out of memory"
      puts "Consider:"
      puts " 1. Reducing batch size further (currently #{batch_size})"
      puts " 2. Increasing OpenSearch heap size"
      puts " 3. Reducing document size/complexity"
      puts " 4. Adding delays between batches"
      puts ""
      puts "Batch #{batch_num} failed: #{error.message}"
      raise StandardError.new("Circuit breaker triggered - OpenSearch cluster is out of memory")
    else
      puts "Error indexing batch #{batch_num}: #{error.message}"
      raise error
    end
  end

  private

  # NOTE(review): `private` has no effect on `def self.` singleton methods —
  # everything below is still reachable as Seed.xxx. Kept to document intent.

  # Extract the field schema from a mappings hash.
  def self.parse_mappings(mappings_json)
    properties = mappings_json.dig('properties') || {}
    parse_properties(properties)
  end

  # Normalize one raw mapping entry into {type:, properties:, format:}.
  # A field with nested 'properties' but no explicit 'type' is an object;
  # any other untyped field defaults to keyword.
  # (Extracted: this logic was previously triplicated across parse_properties,
  # generate_object_value and generate_nested_value.)
  def self.parse_field_config(field_config)
    field_type = field_config['type'] || (field_config['properties'] ? 'object' : 'keyword')
    {
      type: field_type,
      properties: field_config['properties'],
      format: field_config['format']
    }
  end

  # Map raw mapping properties to normalized per-field configs.
  def self.parse_properties(properties)
    properties.each_with_object({}) do |(field_name, field_config), schema|
      schema[field_name] = parse_field_config(field_config)
    end
  end

  # Build `count` random documents conforming to `schema`.
  def self.generate_document_batch(count, schema)
    count.times.map { generate_document(schema) }
  end

  # Build a single random document; fields whose generator returns nil
  # (e.g. alias fields) are omitted.
  def self.generate_document(schema)
    schema.each_with_object({}) do |(field_name, field_config), document|
      value = generate_field_value(field_config)
      document[field_name] = value unless value.nil?
    end
  end

  # Dispatch to the appropriate value generator for a normalized field config.
  # Unknown types fall back to keyword; alias fields yield nil (skipped).
  def self.generate_field_value(field_config)
    case field_config[:type]
    when 'text'
      generate_text_value
    when 'keyword'
      generate_keyword_value
    when 'long', 'integer'
      generate_integer_value
    when 'short'
      generate_short_value
    when 'float', 'double'
      generate_float_value
    when 'boolean'
      generate_boolean_value
    when 'date'
      generate_date_value(field_config[:format])
    when 'object'
      generate_object_value(field_config[:properties])
    when 'nested'
      generate_nested_value(field_config[:properties])
    when 'rank_features'
      generate_rank_features_value
    when 'completion'
      generate_completion_value
    when 'search_as_you_type'
      generate_search_as_you_type_value
    when 'token_count'
      generate_token_count_value
    when 'alias'
      # Skip alias fields - they point to other fields
      nil
    when 'byte'
      generate_byte_value
    when 'half_float'
      generate_half_float_value
    when 'scaled_float'
      generate_scaled_float_value
    when 'unsigned_long'
      generate_unsigned_long_value
    when 'date_nanos'
      generate_date_nanos_value
    when 'wildcard'
      generate_wildcard_value
    when 'constant_keyword'
      generate_constant_keyword_value
    when 'geo_shape'
      generate_geo_shape_value
    when 'date_range'
      generate_date_range_value
    when 'integer_range'
      generate_integer_range_value
    when 'float_range'
      generate_float_range_value
    when 'long_range'
      generate_long_range_value
    when 'double_range'
      generate_double_range_value
    when 'ip_range'
      generate_ip_range_value
    when 'geo_point'
      generate_geo_point_value
    when 'ip'
      generate_ip_value
    when 'binary'
      generate_binary_value
    else
      # Default to keyword for unknown types
      generate_keyword_value
    end
  end

  # A paragraph of 10-50 random words.
  def self.generate_text_value
    rand(10..50).times.map { WORD_LIST.sample }.join(' ')
  end

  # A short phrase or single token in one of four shapes.
  def self.generate_keyword_value
    case rand(1..4)
    when 1
      WORD_LIST.sample
    when 2
      "#{WORD_LIST.sample}_#{rand(1000..9999)}"
    when 3
      "#{WORD_LIST.sample} #{WORD_LIST.sample}"
    when 4
      "#{WORD_LIST.sample}-#{WORD_LIST.sample}"
    end
  end

  # Integers spanning several realistic magnitudes (IDs, scores, day counts).
  def self.generate_integer_value
    case rand(1..5)
    when 1
      rand(1..1000) # Small positive numbers
    when 2
      rand(1_000_000..999_999_999) # Large IDs
    when 3
      rand(-100..100) # Small range including negatives
    when 4
      rand(1..100) # Percentages/scores
    when 5
      rand(1..365) # Days/periods
    end
  end

  # Values well inside the Java short range (-32,768..32,767).
  def self.generate_short_value
    case rand(1..3)
    when 1
      rand(1..100) # Small positive numbers (common for ratings, counts)
    when 2
      rand(-100..100) # Small range including negatives
    when 3
      rand(1..10) # Very small numbers (ratings, flags)
    end
  end

  # Decimal numbers at a few scales/precisions.
  def self.generate_float_value
    case rand(1..3)
    when 1
      (rand * 100).round(2) # 0-100 with 2 decimal places
    when 2
      (rand * 1000).round(4) # 0-1000 with 4 decimal places
    when 3
      (rand * 10 - 5).round(3) # -5 to 5 with 3 decimal places
    end
  end

  def self.generate_boolean_value
    [true, false].sample
  end

  # A uniformly random Time within the past 365 days.
  # (Extracted: previously duplicated in generate_date_value and
  # generate_date_nanos_value.)
  def self.random_time_within_last_year
    now = Time.now.to_i
    start = now - 365 * 24 * 60 * 60
    Time.at(start + rand(now - start))
  end

  # A random date within the last year, rendered in the mapping's declared
  # format when recognized, otherwise ISO 8601.
  def self.generate_date_value(format = nil)
    random_time = random_time_within_last_year

    case format
    when 'epoch_millis'
      (random_time.to_f * 1000).to_i
    when 'epoch_second'
      random_time.to_i
    when 'yyyy-MM-dd'
      random_time.strftime('%Y-%m-%d')
    when 'yyyy-MM-dd HH:mm:ss'
      random_time.strftime('%Y-%m-%d %H:%M:%S')
    when 'MM/dd/yyyy'
      random_time.strftime('%m/%d/%Y')
    when 'dd-MM-yyyy'
      random_time.strftime('%d-%m-%Y')
    else
      # Default to ISO 8601 format
      random_time.iso8601
    end
  end

  # A sub-document with one generated value per declared nested property.
  def self.generate_object_value(properties)
    return {} unless properties

    properties.each_with_object({}) do |(nested_field_name, nested_field_config), object|
      object[nested_field_name] = generate_field_value(parse_field_config(nested_field_config))
    end
  end

  # An array of 1-3 sub-documents for a `nested` field.
  def self.generate_nested_value(properties)
    return [] unless properties

    rand(1..3).times.map { generate_object_value(properties) }
  end

  # A rank_features object: 3-8 random feature names with positive scores.
  # OpenSearch requires positive normal floats (>= 1.17549435E-38);
  # the 1.0e-30 floor leaves a wide safety margin above that minimum.
  def self.generate_rank_features_value
    min_value = 1.0e-30
    rand(3..8).times.each_with_object({}) do |_, features|
      feature_name = "#{WORD_LIST.sample}_#{rand(100..999)}"
      value = rand(min_value..1.0).round(4)
      # Guard against rounding collapsing the value to 0.0
      features[feature_name] = [value, min_value].max
    end
  end

  # Random latitude/longitude pair.
  def self.generate_geo_point_value
    {
      lat: (rand * 180 - 90).round(6), # -90 to 90
      lon: (rand * 360 - 180).round(6) # -180 to 180
    }
  end

  # Random IPv4 or simplified IPv6 address.
  def self.generate_ip_value
    case rand(1..2)
    when 1
      # IPv4
      "#{rand(1..254)}.#{rand(0..255)}.#{rand(0..255)}.#{rand(1..254)}"
    when 2
      # IPv6 (simplified)
      "2001:db8::#{rand(1000..9999)}:#{rand(1000..9999)}:#{rand(1000..9999)}:#{rand(1000..9999)}"
    end
  end

  # 32 random bytes, base64 encoded (deliberate lazy require of base64).
  def self.generate_binary_value
    require 'base64'
    random_bytes = (0...32).map { rand(256) }.pack('C*')
    Base64.encode64(random_bytes).strip
  end

  # Completion suggester input with a random weight.
  def self.generate_completion_value
    {
      'input' => [WORD_LIST.sample, "#{WORD_LIST.sample} #{WORD_LIST.sample}"],
      'weight' => rand(1..100)
    }
  end

  # Three-word phrase for search_as_you_type fields.
  def self.generate_search_as_you_type_value
    "#{WORD_LIST.sample} #{WORD_LIST.sample} #{WORD_LIST.sample}"
  end

  # Token count (integer number of tokens).
  def self.generate_token_count_value
    rand(1..50)
  end

  # Byte values (-128 to 127).
  def self.generate_byte_value
    rand(-128..127)
  end

  # Half-float values (smaller magnitude than regular float).
  def self.generate_half_float_value
    (rand * 100 - 50).round(2)
  end

  # Scaled-float values (stored scaled by the mapping's scaling factor).
  def self.generate_scaled_float_value
    (rand * 100).round(2)
  end

  # Unsigned long values (kept well below 2^64-1 for readability).
  def self.generate_unsigned_long_value
    rand(0..999_999_999)
  end

  # Date within the last year at nanosecond precision.
  def self.generate_date_nanos_value
    random_time_within_last_year.iso8601(9)
  end

  # Wildcard text (keyword-like, optimized for wildcard queries).
  def self.generate_wildcard_value
    "#{WORD_LIST.sample}_#{rand(1000..9999)}"
  end

  # Constant keyword (always the same value).
  def self.generate_constant_keyword_value
    "constant_value"
  end

  # A simple GeoJSON point shape.
  def self.generate_geo_shape_value
    {
      'type' => "point",
      'coordinates' => [rand(-180.0..180.0).round(6), rand(-90.0..90.0).round(6)]
    }
  end

  # Date range covering the last year.
  def self.generate_date_range_value
    start_date = Time.now - (365 * 24 * 60 * 60)
    end_date = Time.now
    {
      'gte' => start_date.iso8601,
      'lte' => end_date.iso8601
    }
  end

  # Integer range with gte <= lte by construction.
  def self.generate_integer_range_value
    start_val = rand(-1000..1000)
    end_val = start_val + rand(1..1000)
    {
      'gte' => start_val,
      'lte' => end_val
    }
  end

  # Float range with a non-negative width.
  def self.generate_float_range_value
    start_val = (rand * 100 - 50).round(2)
    end_val = start_val + (rand * 100).round(2)
    {
      'gte' => start_val,
      'lte' => end_val
    }
  end

  # Long range with gte <= lte by construction.
  def self.generate_long_range_value
    start_val = rand(-1_000_000..1_000_000)
    end_val = start_val + rand(1..1_000_000)
    {
      'gte' => start_val,
      'lte' => end_val
    }
  end

  # Double range with a non-negative width.
  def self.generate_double_range_value
    start_val = (rand * 1000 - 500).round(4)
    end_val = start_val + (rand * 1000).round(4)
    {
      'gte' => start_val,
      'lte' => end_val
    }
  end

  # IPv4 range sharing the first three octets, with gte <= lte; the last
  # octet is clamped to 254 so the range stays valid.
  def self.generate_ip_range_value
    base_ip = "#{rand(1..254)}.#{rand(0..255)}.#{rand(0..255)}.#{rand(1..254)}"

    parts = base_ip.split('.')
    last_octet = parts[3].to_i
    start_last = [last_octet, 254].min
    end_last = [start_last + rand(1..10), 254].min

    start_ip = "#{parts[0]}.#{parts[1]}.#{parts[2]}.#{start_last}"
    end_ip = "#{parts[0]}.#{parts[1]}.#{parts[2]}.#{end_last}"

    {
      'gte' => start_ip,
      'lte' => end_ip
    }
  end
end
@@ -0,0 +1,150 @@
1
+ require 'schema_tools/client'
2
+ require 'schema_tools/schema_files'
3
+ require 'schema_tools/config'
4
+ require 'schema_tools/migrate/migrate'
5
+ require 'schema_tools/painless_scripts_download'
6
+ require 'schema_tools/painless_scripts_upload'
7
+ require 'schema_tools/painless_scripts_delete'
8
+ require 'schema_tools/close'
9
+ require 'schema_tools/delete'
10
+ require 'schema_tools/download'
11
+ require 'schema_tools/new_alias'
12
+ require 'schema_tools/seed'
13
+ require 'schema_tools/diff'
14
+ require 'seeder/seeder'
15
+ require 'json'
16
+ require 'time'
17
+
18
+
19
# Build and connection-test a SchemaTools::Client from environment
# configuration (OPENSEARCH_URL / ELASTICSEARCH_URL, optional credentials,
# DRYRUN=true for dry-run mode).
#
# Exits the process with status 1 — printing usage guidance — when no
# connection URL is configured or the cluster is unreachable.
#
# @return [SchemaTools::Client] a client with a verified connection
def create_client!
  # Check if connection URL is configured
  if SchemaTools::Config.connection_url.nil?
    puts "No connection URL configured."
    puts "Please set either OPENSEARCH_URL or ELASTICSEARCH_URL environment variable."
    print_connection_examples
    exit 1
  end

  # Initialize client and test connection
  client = SchemaTools::Client.new(
    SchemaTools::Config.connection_url,
    dryrun: ENV['DRYRUN'] == 'true',
    username: SchemaTools::Config.connection_username,
    password: SchemaTools::Config.connection_password
  )
  unless client.test_connection
    puts "Failed to connect to OpenSearch/Elasticsearch at #{SchemaTools::Config.connection_url}"
    puts "Please ensure that OPENSEARCH_URL or ELASTICSEARCH_URL environment variable is set correctly."
    print_connection_examples
    exit 1
  end
  client
end

# Shared usage hint for both configuration failure modes in create_client!
# (was duplicated verbatim in each branch).
def print_connection_examples
  puts "Example:"
  puts " export OPENSEARCH_URL=http://localhost:9200"
  puts " export ELASTICSEARCH_URL=https://your-cluster.com"
  puts "Then re-run the command."
end
49
+
50
# Rake tasks for managing index schemas against a live cluster.
namespace :schema do
  desc "Migrate to a specific alias schema or migrate all schemas to their latest revisions"
  task :migrate, [:alias_name] do |_t, args|
    client = create_client!

    # A named alias migrates just that schema; otherwise migrate everything.
    if args[:alias_name]
      SchemaTools.migrate_one_schema(alias_name: args[:alias_name], client: client)
    else
      SchemaTools.migrate_all(client: client)
    end
  end

  desc "Create a new alias with sample schema"
  task :new do
    SchemaTools.new_alias(client: create_client!)
  end

  desc "schema:new"
  task create: :new

  desc "Close an index or alias"
  task :close, [:name] do |_t, args|
    SchemaTools.close(name: args[:name], client: create_client!)
  end

  desc "Hard delete an index (only works on closed indexes) or delete an alias"
  task :delete, [:name] do |_t, args|
    SchemaTools.delete(name: args[:name], client: create_client!)
  end

  desc "Download schema from an existing alias or index"
  task :download do
    SchemaTools.download(client: create_client!)
  end

  desc "Create an alias for an existing index"
  task :alias do
    SchemaTools.create_alias_for_index(client: create_client!)
  end

  desc "Seed data to a live index"
  task :seed do
    SchemaTools.seed(client: create_client!)
  end

  desc "Compare all schemas to their corresponding downloaded alias settings and mappings"
  task :diff do
    SchemaTools::Diff.diff_all_schemas(create_client!)
  end
end
128
+
129
# Rake tasks for syncing painless scripts between the cluster and local disk.
namespace :painless_scripts do
  desc "Download all painless scripts from cluster and store them locally"
  task :download do
    SchemaTools.painless_scripts_download(client: create_client!)
  end

  desc "Upload all painless scripts from local directory to cluster"
  task :upload do
    SchemaTools.painless_scripts_upload(client: create_client!)
  end

  desc "Delete a specific painless script from cluster"
  task :delete, [:script_name] do |_t, args|
    SchemaTools.painless_scripts_delete(script_name: args[:script_name], client: create_client!)
  end
end
@@ -0,0 +1,8 @@
1
require 'rspec/core/rake_task'

# `rake spec` runs every *_spec.rb under test/ with the shared helper preloaded.
RSpec::Core::RakeTask.new(:spec) do |task|
  task.pattern = 'test/**/*_spec.rb'
  task.rspec_opts = '--require ./test/spec_helper'
end

# Bare `rake` runs the spec suite.
task default: :spec