quill-sql 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,60 @@
1
+ require 'redis'
2
+ require 'json'
3
+ require_relative 'db_helper'
4
+
5
# Wraps a database connection with an optional Redis read-through cache for
# query results. Results are serialized to JSON and keyed by the current
# tenant ids plus the SQL text, so per-tenant results never collide.
class CachedConnection
  DEFAULT_CACHE_TTL = 24 * 60 * 60 # 24 hours in seconds

  attr_reader :database_type, :pool, :ttl, :cache
  attr_reader :closed
  attr_writer :tenant_ids

  # database_type - backend identifier understood by DatabaseHelper (e.g. 'clickhouse')
  # config        - connection config forwarded to DatabaseHelper.connect_to_database
  # cache_config  - optional hash read for :cacheType ('redis' or 'rediss'),
  #                 :username, :password, :host, :port, and :ttl (seconds)
  def initialize(database_type, config, cache_config = {})
    @database_type = database_type
    @pool = DatabaseHelper.connect_to_database(database_type, config)
    @tenant_ids = nil
    @ttl = cache_config[:ttl] || DEFAULT_CACHE_TTL
    @cache = get_cache(cache_config)
    @closed = false
  end

  # Runs +text+ against the database, consulting the cache first when enabled.
  # Raises RuntimeError if the connection has already been closed.
  # NOTE: the previous blanket `rescue => e; raise StandardError, e.message`
  # was removed — it discarded the original exception class and backtrace
  # while adding nothing; callers rescuing StandardError still catch all of
  # these errors.
  def query(text)
    raise "Connection is closed" if @closed

    return DatabaseHelper.run_query_by_database(@database_type, @pool, text) if @cache.nil?

    key = "#{@tenant_ids}:#{text}"
    cached_result = @cache.get(key)
    return JSON.parse(cached_result) if cached_result

    new_result = DatabaseHelper.run_query_by_database(@database_type, @pool, text)
    # BUG FIX: redis-rb takes the expiry as the `ex:` keyword argument
    # (seconds), not node-redis style positional "EX" arguments, and the
    # configured @ttl should be honored instead of always using the default.
    @cache.set(key, JSON.generate(new_result), ex: @ttl)
    new_result
  end

  def get_pool
    @pool
  end

  # Disconnects the database and, when caching is enabled, the Redis client,
  # then marks this wrapper closed so further queries raise.
  def close
    DatabaseHelper.disconnect_from_database(@database_type, @pool)
    @cache.close if @cache.respond_to?(:close)
    @closed = true
  end

  private

  # Builds a Redis client when cache_config requests a redis/rediss cache;
  # returns nil (caching disabled) otherwise.
  # BUG FIX: anchor with \A/\z — ^/$ are per-line anchors in Ruby and would
  # accept values like "redis\nmalicious".
  def get_cache(config)
    return nil unless config[:cacheType]&.match?(/\Arediss?\z/)

    redis_url = "#{config[:cacheType]}://#{config[:username]}:#{config[:password]}@#{config[:host]}:#{config[:port]}"
    Redis.new(url: redis_url)
  end
end
@@ -0,0 +1,333 @@
1
+ require 'uri'
2
+ require 'json'
3
+ require 'click_house'
4
+
5
# Helpers for talking to ClickHouse and normalizing its metadata into the
# PostgreSQL-style shapes the rest of the package expects.
module ClickHouseHelper
  # Maps ClickHouse column types (and their SQL-standard/MySQL aliases) to
  # PostgreSQL type OIDs so downstream consumers can treat every backend
  # uniformly (21=int2, 23=int4, 20=int8, 700=float4, 701=float8,
  # 1700=numeric, 16=bool, 25=text, 17=bytea, 1042=bpchar, 1043=varchar,
  # 2950=uuid, 1082=date, 1184=timestamptz, 2277=anyarray, 3802=jsonb,
  # 1186=interval, 10045 is a custom enum marker).
  CLICKHOUSE_PG_TYPE_MAP = {
    # Signed Integer Types and Aliases
    'Int8' => 21,
    'TINYINT' => 21,
    'INT1' => 21,
    'BYTE' => 21,
    'TINYINT SIGNED' => 21,
    'INT1 SIGNED' => 21,
    'Int16' => 21,
    'SMALLINT' => 21,
    'SMALLINT SIGNED' => 21,
    'Int32' => 23,
    'INT' => 23,
    'INTEGER' => 23,
    'MEDIUMINT' => 23,
    'MEDIUMINT SIGNED' => 23,
    'INT SIGNED' => 23,
    'INTEGER SIGNED' => 23,
    'Int64' => 20,
    'BIGINT' => 20,
    'SIGNED' => 20,
    'BIGINT SIGNED' => 20,
    'TIME' => 20,
    # Unsigned Integer Types and Aliases
    'UInt8' => 21,
    'TINYINT UNSIGNED' => 21,
    'INT1 UNSIGNED' => 21,
    'UInt16' => 21,
    'SMALLINT UNSIGNED' => 21,
    'UInt32' => 23,
    'MEDIUMINT UNSIGNED' => 23,
    'INT UNSIGNED' => 23,
    'INTEGER UNSIGNED' => 23,
    'UInt64' => 20,
    'UNSIGNED' => 20,
    'BIGINT UNSIGNED' => 20,
    'BIT' => 20,
    'SET' => 20,
    # Floating Point Types and Aliases
    'Float32' => 700,
    'FLOAT' => 700,
    'REAL' => 700,
    'SINGLE' => 700,
    'Float64' => 701,
    'DOUBLE' => 701,
    'DOUBLE PRECISION' => 701,
    'BFloat16' => 700,
    # Decimal Types
    'Decimal' => 1700,
    'Decimal32' => 1700,
    'Decimal64' => 1700,
    'Decimal128' => 1700,
    'Decimal256' => 1700,
    # Boolean Type
    'Bool' => 16,
    # String Types and Aliases
    'String' => 25,
    'LONGTEXT' => 25,
    'MEDIUMTEXT' => 25,
    'TINYTEXT' => 25,
    'TEXT' => 25,
    'LONGBLOB' => 17,
    'MEDIUMBLOB' => 17,
    'TINYBLOB' => 17,
    'BLOB' => 17,
    'VARCHAR' => 1043,
    'CHAR' => 1042,
    'CHAR LARGE OBJECT' => 25,
    'CHAR VARYING' => 1043,
    'CHARACTER LARGE OBJECT' => 25,
    'CHARACTER VARYING' => 1043,
    'NCHAR LARGE OBJECT' => 25,
    'NCHAR VARYING' => 1043,
    'NATIONAL CHARACTER LARGE OBJECT' => 25,
    'NATIONAL CHARACTER VARYING' => 1043,
    'NATIONAL CHAR VARYING' => 1043,
    'NATIONAL CHARACTER' => 1042,
    'NATIONAL CHAR' => 1042,
    'BINARY LARGE OBJECT' => 17,
    'BINARY VARYING' => 17,
    # Fixed String
    'FixedString' => 1042,
    # Identifier Types
    'UUID' => 2950,
    # Date and Time Types
    'Date' => 1082,
    'Date32' => 1082,
    'DateTime' => 1184,
    'DateTime64' => 1184,
    # Array Types
    'Array' => 2277,
    # JSON-like Types
    'JSON' => 3802,
    'Nested' => 3802,
    # Binary Types
    'IPv4' => 17,
    'IPv6' => 17,
    # Enum Types
    'Enum8' => 10045,
    'Enum16' => 10045,
    # Geospatial-like Types
    'Point' => 17,
    'Ring' => 17,
    'Polygon' => 17,
    # Specialized Types
    'Nothing' => 17,
    'Interval' => 1186
  }.freeze

  class << self
    # Resolves a raw ClickHouse type string (possibly parameterized or
    # wrapped, e.g. "Nullable(LowCardinality(String))") to a PG type OID.
    # Unknown types log a warning and fall back to varchar (1043).
    def parse_clickhouse_type(type)
      # Remove whitespace and handle common variations
      normalized_type = type.strip.gsub(/\s+/, ' ')

      # Map/aggregate wrappers are reported as JSON-like objects
      return CLICKHOUSE_PG_TYPE_MAP['JSON'] if normalized_type.start_with?('Map(')
      return CLICKHOUSE_PG_TYPE_MAP['JSON'] if normalized_type.start_with?('AggregateFunction(')
      return CLICKHOUSE_PG_TYPE_MAP['JSON'] if normalized_type.start_with?('SimpleAggregateFunction(')

      # Nullable(...) — recurse on the inner type
      if normalized_type.start_with?('Nullable(')
        inner_type = normalized_type[9..-2]
        return parse_clickhouse_type(inner_type)
      end

      # Array-like containers
      return CLICKHOUSE_PG_TYPE_MAP['Array'] if normalized_type.start_with?('Array(')
      return CLICKHOUSE_PG_TYPE_MAP['Array'] if normalized_type.start_with?('Tuple(')

      # Parameterized enums
      return CLICKHOUSE_PG_TYPE_MAP['Enum8'] if normalized_type.start_with?('Enum8(')
      return CLICKHOUSE_PG_TYPE_MAP['Enum16'] if normalized_type.start_with?('Enum16(')

      # Decimal / Decimal32 / Decimal64 / ... with precision arguments
      return CLICKHOUSE_PG_TYPE_MAP['Decimal'] if normalized_type.match?(/\ADecimal(\d*)?\(/)

      # DateTime with timezone / precision arguments
      return CLICKHOUSE_PG_TYPE_MAP['DateTime'] if normalized_type.start_with?('DateTime(')
      return CLICKHOUSE_PG_TYPE_MAP['DateTime64'] if normalized_type.start_with?('DateTime64(')

      # FixedString(N)
      return CLICKHOUSE_PG_TYPE_MAP['FixedString'] if normalized_type.start_with?('FixedString(')

      # LowCardinality(...) — recurse on the inner type
      if normalized_type.start_with?('LowCardinality(')
        inner_type = normalized_type[15..-2]
        return parse_clickhouse_type(inner_type)
      end

      # Direct lookup (case-insensitive)
      lookup_type = CLICKHOUSE_PG_TYPE_MAP.keys.find { |key| key.downcase == normalized_type.downcase }
      return CLICKHOUSE_PG_TYPE_MAP[lookup_type] if lookup_type

      warn "Unknown ClickHouse type: #{type}. Defaulting to VARCHAR."
      1043 # Default to Varchar
    end

    # Configures the global ClickHouse client from +config+ (:url, :username,
    # :password) and returns a connection object.
    def connect_to_clickhouse(config)
      ClickHouse.config do |config_object|
        config_object.url = config[:url]
        config_object.username = config[:username]
        config_object.password = config[:password]
      end

      ClickHouse.connection
    end

    # Closes +client+ when the underlying driver exposes #close.
    def disconnect_from_clickhouse(client)
      client.close if client.respond_to?(:close)
    end

    # Executes +sql+ and normalizes the response into { fields:, rows: }.
    # FORMAT JSON is appended so ClickHouse includes column metadata; any
    # caller-supplied FORMAT clause and trailing semicolon are stripped first.
    def run_query_clickhouse(sql, client)
      # BUG FIX: the original had a duplicated assignment (`response = response = ...`).
      normalized_sql = sql.gsub(/\s*FORMAT\s+\w+/i, '').gsub(/;\s*$/, '')
      response = client.execute("#{normalized_sql} FORMAT JSON")

      data = response.body

      fields = (data['meta'] || []).map do |field|
        {
          name: field['name'],
          dataTypeID: parse_clickhouse_type(field['type'])
        }
      end

      {
        fields: fields,
        rows: data['data']
      }
    end

    # Lists user databases (ClickHouse databases act as schemas), excluding
    # the system/information_schema catalogs.
    def get_schemas_clickhouse(client)
      sql = <<~SQL
        SELECT DISTINCT database AS schema_name
        FROM system.tables
        WHERE LOWER(database) NOT IN ('system', 'information_schema')
      SQL

      results = run_query_clickhouse(sql, client)
      results[:rows].map { |row| row['schema_name'] }
    end

    # Returns [{ tableName:, schemaName: }] for every table in +schema_names+.
    def get_tables_by_schema_clickhouse(client, schema_names)
      schema_names.flat_map do |schema|
        sql = <<~SQL
          SELECT name as table_name, database as table_schema
          FROM system.tables
          WHERE database = '#{escape_sql(schema)}'
        SQL

        results = run_query_clickhouse(sql, client)
        results[:rows].map do |row|
          {
            tableName: row['table_name'],
            schemaName: row['table_schema']
          }
        end
      end
    end

    # Returns the column names of +table_name+ in +schema_name+.
    def get_columns_by_table_clickhouse(client, schema_name, table_name)
      sql = <<~SQL
        SELECT name as column_name
        FROM system.columns
        WHERE database = '#{escape_sql(schema_name)}' AND table = '#{escape_sql(table_name)}'
      SQL

      results = run_query_clickhouse(sql, client)
      results[:rows].map { |row| row['column_name'] }
    end

    # Heuristically finds columns in OTHER tables of +schema_name+ that look
    # like foreign keys referencing +table_name+'s +primary_key+ — first by
    # exact naming conventions (user_id / userId), then by looser LIKE
    # patterns if nothing matched.
    def get_foreign_keys_clickhouse(client, schema_name, table_name, primary_key)
      schema = escape_sql(schema_name)
      table = escape_sql(table_name)
      pk = escape_sql(primary_key)
      cap_pk = escape_sql(capitalize(primary_key))
      depluralized = escape_sql(depluralize(table_name))

      sql = <<~SQL
        SELECT column_name
        FROM information_schema.columns
        WHERE table_schema = '#{schema}'
        AND table_name != '#{table}'
        AND (column_name = '#{pk}'
        OR column_name = '#{depluralized}_#{pk}'
        OR column_name = '#{depluralized}#{cap_pk}')
      SQL

      results = run_query_clickhouse(sql, client)
      foreign_keys = results[:rows].map { |key| key['column_name'] }

      foreign_keys = foreign_keys.reject { |key| ['id', '_id_'].include?(key) }
      foreign_keys = foreign_keys.uniq

      if foreign_keys.empty?
        # BUG FIX: '\_' in a double-quote-style heredoc collapses to a bare
        # '_' (an any-character LIKE wildcard); '\\_' emits the intended
        # literal-underscore escape into the SQL.
        sql = <<~SQL
          SELECT column_name
          FROM information_schema.columns
          WHERE table_schema = '#{schema}'
          AND table_name != '#{table}'
          AND (column_name LIKE '#{table}%'
          OR column_name LIKE '%\\_id'
          OR column_name LIKE '%Id'
          OR column_name LIKE '%\\_#{pk}'
          OR column_name LIKE '%#{cap_pk}')
        SQL

        results = run_query_clickhouse(sql, client)
        foreign_keys = results[:rows].map { |key| key['column_name'] }.uniq
      end

      foreign_keys
    end

    # Builds the schema description consumed by the UI: for each entry in
    # +table_names+ (hashes with :schemaName/:tableName), lists every column
    # with its PG type OID and the raw ClickHouse type string.
    def get_schema_column_info_clickhouse(client, schema_name, table_names)
      table_names.map do |table_name|
        query = <<~SQL
          SELECT
          name as "column_name",
          type as "field_type"
          FROM system.columns
          WHERE database = '#{escape_sql(table_name[:schemaName])}'
          AND table = '#{escape_sql(table_name[:tableName])}'
        SQL

        results = run_query_clickhouse(query, client)
        {
          tableName: "#{table_name[:schemaName]}.#{table_name[:tableName]}",
          displayName: "#{table_name[:schemaName]}.#{table_name[:tableName]}",
          columns: results[:rows].map do |row|
            type_oid = parse_clickhouse_type(row['field_type'])
            {
              columnName: row['column_name'],
              displayName: row['column_name'],
              dataTypeID: type_oid,
              fieldType: row['field_type']
            }
          end
        }
      end
    end

    # Splits a connection string ("http(s)://user:pass@host:port") into the
    # { url:, username:, password: } hash connect_to_clickhouse expects.
    def format_clickhouse_config(connection_string)
      parsed = URI.parse(connection_string)

      {
        url: "#{parsed.scheme}://#{parsed.host}:#{parsed.port}",
        username: parsed.user || 'default',
        password: parsed.password || ''
      }
    end

    private

    # Escapes single quotes so a value can be embedded in a ClickHouse string
    # literal. Identifiers here come from introspection results, but escape
    # anyway to guard against injection via crafted schema/table names.
    def escape_sql(str)
      str.to_s.gsub("'", "''")
    end

    def capitalize(str)
      str.capitalize
    end

    # Simple depluralization - you might want to use a proper library like ActiveSupport
    def depluralize(str)
      return str[0..-2] if str.end_with?('s')
      str
    end
  end
end
@@ -0,0 +1,115 @@
1
+ require 'json'
2
+ require 'uri'
3
+ require_relative 'clickhouse'
4
+
5
# Backend-agnostic facade: dispatches connection management, querying, and
# schema introspection to the helper for the requested database type.
# Currently only ClickHouse is implemented.
module DatabaseHelper
  SUPPORTED_DATABASES = ['clickhouse'].freeze # Add others as they're implemented

  # Simple value object wrapping a normalized query result
  # (fields metadata + row data).
  class QuillQueryResults
    attr_reader :fields, :rows

    def initialize(fields, rows)
      @fields = fields
      @rows = rows
    end
  end

  # Raised for unsupported database types.
  class DatabaseError < StandardError; end

  # Parses +connection_string+ into the config hash the backend expects.
  def self.get_database_credentials(database_type, connection_string)
    case database_type.downcase
    when 'clickhouse'
      ClickHouseHelper.format_clickhouse_config(connection_string)
    else
      raise DatabaseError, "Invalid database type: #{database_type}"
    end
  end

  # Opens and returns a live connection for +database_type+ using +config+.
  def self.connect_to_database(database_type, config)
    case database_type.downcase
    when 'clickhouse'
      ClickHouseHelper.connect_to_clickhouse(config)
    else
      raise DatabaseError, "Invalid database type: #{database_type}"
    end
  end

  # Connects, yields the connection, and always disconnects afterwards.
  # Returns the block's value, or { success: false, message: ... } when the
  # block raises.
  def self.with_connection(database_type, connection_string)
    config = get_database_credentials(database_type, connection_string)
    connection = connect_to_database(database_type, config)
    begin
      yield(connection)
    rescue StandardError => e
      # BUG FIX: StandardError has no #error method (the old
      # `e.message || e.error || e.to_s` only worked because #message is
      # never nil); report the message directly.
      { success: false, message: e.message }
    ensure
      disconnect_from_database(database_type, connection)
    end
  end

  # Runs +sql+ on an already-open +connection+.
  def self.run_query_by_database(database_type, connection, sql)
    case database_type.downcase
    when 'clickhouse'
      ClickHouseHelper.run_query_clickhouse(sql, connection)
    else
      raise DatabaseError, "Invalid database type: #{database_type}"
    end
  end

  # One-shot convenience: connect, run +sql+, disconnect.
  def self.connect_and_run_query(database_type, connection_string, sql)
    with_connection(database_type, connection_string) do |connection|
      run_query_by_database(database_type, connection, sql)
    end
  end

  # Closes +database+; unknown types are silently ignored (no-op on purpose,
  # so cleanup paths never raise).
  def self.disconnect_from_database(database_type, database)
    case database_type.downcase
    when 'clickhouse'
      ClickHouseHelper.disconnect_from_clickhouse(database)
    end
  end

  # Lists schema (database) names visible on +connection+.
  def self.get_schemas_by_database(database_type, connection)
    case database_type.downcase
    when 'clickhouse'
      ClickHouseHelper.get_schemas_clickhouse(connection)
    else
      raise DatabaseError, "Invalid database type: #{database_type}"
    end
  end

  # Lists tables for the given schemas. NOTE: the ClickHouse implementation
  # expects an Array of schema names here.
  def self.get_tables_by_schema_by_database(database_type, connection, schema_name)
    case database_type.downcase
    when 'clickhouse'
      ClickHouseHelper.get_tables_by_schema_clickhouse(connection, schema_name)
    else
      raise DatabaseError, "Invalid database type: #{database_type}"
    end
  end

  # Lists column names of one table.
  def self.get_columns_by_table_by_database(database_type, connection, schema_name, table_name)
    case database_type.downcase
    when 'clickhouse'
      ClickHouseHelper.get_columns_by_table_clickhouse(connection, schema_name, table_name)
    else
      raise DatabaseError, "Invalid database type: #{database_type}"
    end
  end

  # Heuristically finds foreign-key-like columns referencing a table's
  # primary key.
  def self.get_foreign_keys_by_database(database_type, connection, schema_name, table_name, primary_key)
    case database_type.downcase
    when 'clickhouse'
      ClickHouseHelper.get_foreign_keys_clickhouse(connection, schema_name, table_name, primary_key)
    else
      raise DatabaseError, "Invalid database type: #{database_type}"
    end
  end

  # Builds per-table column/type metadata for the given tables.
  def self.get_column_info_by_schema_by_database(database_type, connection, schema_name, tables)
    case database_type.downcase
    when 'clickhouse'
      ClickHouseHelper.get_schema_column_info_clickhouse(connection, schema_name, tables)
    else
      raise DatabaseError, "Invalid database type: #{database_type}"
    end
  end
end
@@ -0,0 +1,142 @@
1
# Operator vocabularies and enum-like string constants used by the filter
# validation layer. All constants are frozen so shared strings cannot be
# mutated by callers.

# Comparison operators accepted by string filters.
module StringOperator
  IS_EXACTLY = 'is exactly'.freeze
  IS_NOT_EXACTLY = 'is not exactly'.freeze
  CONTAINS = 'contains'.freeze
  IS = 'is'.freeze
  IS_NOT = 'is not'.freeze
end

# Comparison operators accepted by date filters.
module DateOperator
  CUSTOM = 'custom'.freeze
  IN_THE_LAST = 'in the last'.freeze
  IN_THE_PREVIOUS = 'in the previous'.freeze
  IN_THE_CURRENT = 'in the current'.freeze
  EQUAL_TO = 'equal to'.freeze
  NOT_EQUAL_TO = 'not equal to'.freeze
  GREATER_THAN = 'greater than'.freeze
  LESS_THAN = 'less than'.freeze
  GREATER_THAN_OR_EQUAL_TO = 'greater than or equal to'.freeze
  LESS_THAN_OR_EQUAL_TO = 'less than or equal to'.freeze
end

# Comparison operators accepted by numeric filters.
module NumberOperator
  EQUAL_TO = 'equal to'.freeze
  NOT_EQUAL_TO = 'not equal to'.freeze
  GREATER_THAN = 'greater than'.freeze
  LESS_THAN = 'less than'.freeze
  GREATER_THAN_OR_EQUAL_TO = 'greater than or equal to'.freeze
  LESS_THAN_OR_EQUAL_TO = 'less than or equal to'.freeze
end

# Null-check operators.
module NullOperator
  IS_NOT_NULL = 'is not null'.freeze
  IS_NULL = 'is null'.freeze
end

# Comparison operators accepted by boolean filters.
module BoolOperator
  EQUAL_TO = 'equal to'.freeze
  NOT_EQUAL_TO = 'not equal to'.freeze
end

# Time granularities usable in date filter values.
module TimeUnit
  YEAR = 'year'.freeze
  QUARTER = 'quarter'.freeze
  MONTH = 'month'.freeze
  WEEK = 'week'.freeze
  DAY = 'day'.freeze
  HOUR = 'hour'.freeze
end

# Field type tags attached to validated filters.
module FieldType
  STRING = 'string'.freeze
  NUMBER = 'number'.freeze
  DATE = 'date'.freeze
  NULL = 'null'.freeze
  BOOLEAN = 'boolean'.freeze
end

# Wire-format discriminators identifying each filter variant.
module FilterType
  STRING_FILTER = 'string-filter'.freeze
  DATE_FILTER = 'date-filter'.freeze
  DATE_CUSTOM_FILTER = 'date-custom-filter'.freeze
  DATE_COMPARISON_FILTER = 'date-comparison-filter'.freeze
  NUMERIC_FILTER = 'numeric-filter'.freeze
  NULL_FILTER = 'null-filter'.freeze
  STRING_IN_FILTER = 'string-in-filter'.freeze
  BOOLEAN_FILTER = 'boolean-filter'.freeze
end
68
+
69
# Mixin that validates an incoming filter hash and tags it with the field
# type it operates on.
module FilterUtils
  # Validates +filter+ according to its :filter_type and returns a copy with
  # :field_type merged in. Raises (via the validators) on malformed input;
  # returns nil for an unrecognized :filter_type.
  def convert_custom_filter(filter)
    validator, field_type =
      case filter[:filter_type]
      when FilterType::STRING_FILTER          then [:validate_string_filter, FieldType::STRING]
      when FilterType::STRING_IN_FILTER       then [:validate_string_in_filter, FieldType::STRING]
      when FilterType::NUMERIC_FILTER         then [:validate_numeric_filter, FieldType::NUMBER]
      when FilterType::NULL_FILTER            then [:validate_null_filter, FieldType::NULL]
      when FilterType::BOOLEAN_FILTER         then [:validate_boolean_filter, FieldType::BOOLEAN]
      when FilterType::DATE_FILTER            then [:validate_date_filter, FieldType::DATE]
      when FilterType::DATE_CUSTOM_FILTER     then [:validate_date_custom_filter, FieldType::DATE]
      when FilterType::DATE_COMPARISON_FILTER then [:validate_date_comparison_filter, FieldType::DATE]
      end

    return nil unless validator

    send(validator, filter)
    filter.merge(field_type: field_type)
  end
end
99
+
100
private

# Shared guard: true when +value+ equals one of +operator_module+'s constant
# values.
def valid_operator?(operator_module, value)
  operator_module.constants.any? { |c| operator_module.const_get(c) == value }
end

# Each validator below raises with a filter-specific message when the value
# or operator is malformed, and returns silently otherwise.

def validate_string_filter(filter)
  raise "Invalid value for StringFilter" unless filter[:value].is_a?(String)
  raise "Invalid operator for StringFilter" unless valid_operator?(StringOperator, filter[:operator])
end

def validate_string_in_filter(filter)
  raise "Invalid value for StringInFilter" unless filter[:value].is_a?(Array)
  raise "Invalid operator for StringInFilter" unless valid_operator?(StringOperator, filter[:operator])
end

def validate_numeric_filter(filter)
  raise "Invalid value for NumericFilter" unless filter[:value].is_a?(Numeric)
  raise "Invalid operator for NumericFilter" unless valid_operator?(NumberOperator, filter[:operator])
end

def validate_null_filter(filter)
  raise "Invalid value for NullFilter" unless filter[:value].nil?
  raise "Invalid operator for NullFilter" unless valid_operator?(NullOperator, filter[:operator])
end

def validate_boolean_filter(filter)
  raise "Invalid value for BooleanFilter" unless [true, false].include?(filter[:value])
  raise "Invalid operator for BooleanFilter" unless valid_operator?(BoolOperator, filter[:operator])
end

# Expects value to be { value: Numeric, unit: String } (see TimeUnit).
def validate_date_filter(filter)
  value = filter[:value]
  raise "Invalid value for DateFilter" unless value.is_a?(Hash) && value[:value].is_a?(Numeric) && value[:unit].is_a?(String)
  raise "Invalid operator for DateFilter" unless valid_operator?(DateOperator, filter[:operator])
end

# Expects value to be { start_date: String, end_date: String }.
def validate_date_custom_filter(filter)
  value = filter[:value]
  raise "Invalid value for DateCustomFilter" unless value.is_a?(Hash) && value[:start_date].is_a?(String) && value[:end_date].is_a?(String)
  raise "Invalid operator for DateCustomFilter" unless valid_operator?(DateOperator, filter[:operator])
end

def validate_date_comparison_filter(filter)
  raise "Invalid value for DateComparisonFilter" unless filter[:value].is_a?(String)
  raise "Invalid operator for DateComparisonFilter" unless valid_operator?(DateOperator, filter[:operator])
end