quill-sql 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/quill-sql.rb ADDED
@@ -0,0 +1,370 @@
1
+ # Ruby 2.3 Compatible Implementation
2
+ require 'json'
3
+ require 'net/http'
4
+ require 'uri'
5
+ require 'dotenv'
6
+ require_relative './db/cached_connection'
7
+ require_relative './db/db_helper'
8
+ require_relative 'utils/tenants'
9
+ require_relative 'models/filters'
10
+ require_relative 'utils/schema'
11
+ require_relative 'utils/run_query_processes'
12
+
13
+ Dotenv.load(File.expand_path('../.env', __dir__))
14
+
# Database engines the Quill SDK can connect to. Each constant holds the
# lowercase identifier the rest of the SDK (and the metadata server) uses.
module DatabaseType
  POSTGRESQL = 'postgresql'.freeze
  SNOWFLAKE = 'snowflake'.freeze
  BIGQUERY = 'bigquery'.freeze
  MYSQL = 'mysql'.freeze
  CLICKHOUSE = 'clickhouse'.freeze

  # True when +type+ is exactly one of the identifiers declared above.
  def self.valid?(type)
    constants.any? { |name| const_get(name) == type }
  end
end
26
+
require 'set' # Set is used below but was never required at the top of the file;
              # it only loaded transitively (e.g. via net/http) on some Ruby
              # versions, so make the dependency explicit.

# Sentinel tenant values recognized by the metadata server.
SINGLE_TENANT = "QUILL_SINGLE_TENANT".freeze
ALL_TENANTS = "QUILL_ALL_TENANTS".freeze

# Metadata server endpoint: local server in development, App Engine otherwise.
HOST = (ENV['ENV'] == 'development' ?
  'http://localhost:8080' :
  'https://quill-344421.uc.r.appspot.com').freeze

# Tasks whose flags must be synthesized from tenant mappings (see Quill#query).
FLAG_TASKS = Set.new(['dashboard', 'report', 'item', 'report-info', 'filter-options']).freeze
35
+
36
+ class QuillAPIError < StandardError; end
37
+
# Main SDK entry point. Holds a cached connection to the customer database and
# credentials for the Quill metadata server; #query forwards dashboard/report
# tasks to the server, runs the SQL it returns locally, and merges the results.
class Quill
  attr_reader :target_connection, :base_url, :config

  # options:
  #   :private_key                - Quill API key (required)
  #   :database_type              - a DatabaseType value (required)
  #   :database_connection_string - connection URL, parsed into credentials
  #   :database_config            - pre-built credentials hash (alternative)
  #   :metadata_server_url        - overrides HOST for the metadata server
  #   :cache                      - cache options forwarded to CachedConnection
  # Raises ArgumentError via validate_options when required options are missing.
  def initialize(options = {})
    validate_options(options)
    @base_url = options[:metadata_server_url] || HOST
    @config = { headers: { 'Authorization' => "Bearer #{options[:private_key]}" } }

    # A connection string, when given, takes precedence over :database_config.
    credentials = options[:database_config]
    if options[:database_connection_string]
      credentials = DatabaseHelper.get_database_credentials(
        options[:database_type],
        options[:database_connection_string]
      )
    end

    @target_connection = CachedConnection.new(
      options[:database_type],
      credentials,
      options[:cache] || {}
    )
  end

  # Executes one metadata-server task end to end.
  #
  # params:
  #   :tenants  - tenant ids, or [SINGLE_TENANT]/[ALL_TENANTS] sentinels (required)
  #   :metadata - task payload; must contain :task (required)
  #   :flags    - optional feature flags
  #   :filters  - optional SDK filters, converted via FilterUtils
  #
  # Returns { data:, queries:, status: "success" } on success, or
  # { status: "error", error:, data: } on failure (errors are caught, not raised).
  def query(params = {})
    validate_query_params(params)

    tenants = params[:tenants]
    flags = params[:flags]
    metadata = params[:metadata]
    # NOTE(review): transform_keys is shallow — nested hashes such as
    # metadata[:runQueryConfig] keep whatever key type the caller used.
    metadata = metadata.transform_keys(&:to_sym)
    filters = params[:filters]

    # TODO: Add support for multiple tenants
    if tenants&.any?
      @target_connection.tenant_ids = TenantUtils.extract_tenant_ids(tenants)
    end

    response_metadata = {}

    unless metadata[:task]
      return { error: "Missing task.", status: "error", data: {} }
    end

    tenant_flags = nil

    begin
      # If the task requires flags to be synthesized from tenants
      if FLAG_TASKS.include?(metadata[:task]) &&
         tenants&.first != ALL_TENANTS &&
         tenants&.first != SINGLE_TENANT &&
         (metadata[:task] != 'filter-options' ||
          !metadata[:reportId])

        # Ask the server which flag queries to run for these tenants.
        response = post_quill('tenant-mapped-flags', {
          reportId: metadata[:reportId] || metadata[:dashboardItemId],
          dashboardName: metadata[:name],
          tenants: tenants,
          flags: flags
        })

        return {
          status: "error",
          error: response[:error],
          data: response[:metadata] || {}
        } if response[:error]

        # Run each flag query; queryOrder gives the tenant field for each
        # result slot. Flags are deduplicated via Set before being returned.
        flag_query_results = run_queries(response[:queries], @target_connection.database_type)
        tenant_flags = response[:metadata][:queryOrder].map.with_index do |tenant_field, index|
          {
            tenant_field: tenant_field,
            flags: Set.new(
              flag_query_results[:queryResults][index][:rows].map do |row|
                row['quill_flag']
              end
            ).to_a
          }
        end
      elsif tenants&.first == SINGLE_TENANT && flags
        # NOTE(review): flags.length is always truthy in Ruby (even 0) —
        # presumably this meant flags.length > 0. Behavior still holds because
        # flags[0].is_a?(Hash) is false for an empty array.
        if flags.length && flags[0].is_a?(Hash)
          tenant_flags = [{ tenant_field: SINGLE_TENANT, flags: flags }]
        else
          tenant_flags = flags.map do |flag|
            { tenant_field: SINGLE_TENANT, flags: flag }
          end
        end
      end

      # Optional pre-queries run before the main task is posted (e.g. to
      # capture view columns the server needs).
      pre_query_results = if metadata[:preQueries]
        run_queries(
          metadata[:preQueries],
          @target_connection.database_type,
          metadata[:databaseType],
          metadata[:runQueryConfig]
        )
      else
        {}
      end

      # NOTE(review): string key "overridePost" here vs symbol keys for the
      # same nested config elsewhere — works only if the caller used string
      # keys inside runQueryConfig; confirm against callers.
      if metadata.dig(:runQueryConfig, "overridePost")
        return {
          data: { queryResults: pre_query_results },
          status: "success"
        }
      end

      # Post the task itself; the server replies with queries to run plus
      # response metadata.
      response = post_quill(metadata[:task], {
        **metadata,
        sdk_filters: filters&.map { |filter| FilterUtils.convert_custom_filter(filter) },
        **pre_query_results,
        tenants: tenants,
        flags: tenant_flags,
        viewQuery: metadata[:preQueries]&.first
      })

      return {
        status: "error",
        error: response[:error],
        data: response[:metadata] || {}
      } if response[:error]

      response_metadata = response[:metadata] if response[:metadata]

      # NOTE(review): :database_type here but :databaseType in the preQueries
      # branch above — confirm which key the caller actually supplies.
      results = run_queries(
        response[:queries],
        @target_connection.database_type,
        metadata[:database_type],
        response_metadata.dig(:runQueryConfig)
      )
      # arrayToMap: splice each mapped row array into the metadata item it
      # belongs to (arrayName names the metadata array, field the target key).
      if results[:mappedArray] && response_metadata.dig(:runQueryConfig, :arrayToMap)
        array_to_map = response_metadata.dig(:runQueryConfig, :arrayToMap)
        results[:mappedArray].each_with_index do |array, index|
          target_item = response_metadata.dig(array_to_map[:arrayName].to_sym, index)
          if target_item
            target_item[array_to_map[:field]] = array
          end
        end
        results.delete(:mappedArray)
      end

      # Single-query convenience: lift rows/fields to the top-level metadata.
      if results[:queryResults]&.size == 1
        query_results = results[:queryResults].first
        response_metadata[:rows] = query_results[:rows] if query_results[:rows]
        response_metadata[:fields] = query_results[:fields] if query_results[:fields]
      end

      {
        data: response_metadata,
        queries: results,
        status: "success"
      }
    rescue StandardError => e
      # Failed view updates are reported back so the server can mark the view
      # broken; all other errors are simply returned to the caller.
      if metadata[:task] == "update-view"
        post_quill("set-broken-view", {
          table: metadata[:name],
          clientId: metadata[:clientId],
          error: e.message
        })
      end

      {
        status: "error",
        error: e.message,
        data: response_metadata || {}
      }
    end
  end

  private

  # Validates constructor options; raises ArgumentError on the first problem.
  def validate_options(options)
    if !options[:private_key]
      raise ArgumentError, "Private key is required"
    end
    if !options[:database_type]
      raise ArgumentError, "Database type is required"
    end
    if !DatabaseType.valid?(options[:database_type])
      raise ArgumentError, "Invalid database type"
    end
    if !options[:database_connection_string] && !options[:database_config]
      raise ArgumentError, "Either database_connection_string or database_config is required"
    end
  end

  # Validates #query params; raises ArgumentError when a required key is absent.
  def validate_query_params(params)
    if !params[:tenants]
      raise ArgumentError, "tenants is required"
    end
    if !params[:metadata]
      raise ArgumentError, "metadata is required"
    end
  end

  # Runs +queries+ against the target connection, applying the behaviors in
  # +run_query_config+ (arrayToMap / getColumns / getColumnsForSchema /
  # getTables / limits / fieldsToRemove / convertDatatypes).
  #
  # pk_database_type is the SDK's own database type; when +database_type+ is
  # given and differs, nothing is run and a dbMismatched result is returned.
  def run_queries(queries, pk_database_type, database_type = nil, run_query_config = nil)
    return { queryResults: [] } unless queries

    if database_type && database_type.downcase != pk_database_type.downcase
      return {
        dbMismatched: true,
        backendDatabaseType: pk_database_type,
        queryResults: []
      }
    end

    results = {}

    run_query_config = run_query_config.transform_keys(&:to_sym) if run_query_config

    if run_query_config&.dig(:arrayToMap)
      # Collect only the row arrays; the caller maps them back onto metadata.
      mapped_array = RunQueryProcesses.map_queries(queries, @target_connection)
      results[:queryResults] = []
      results[:mappedArray] = mapped_array

    elsif run_query_config&.dig(:getColumns)
      # Probe the first query (sample-limited) just to describe its columns.
      query_result = @target_connection.query("#{queries[0].gsub(/;/, '')} limit 1000")
      columns = query_result[:fields].map do |field|
        {
          fieldType: Schema.convert_type_to_postgres(field[:dataTypeID]),
          name: field[:name],
          displayName: field[:name],
          isVisible: true,
          field: field[:name]
        }
      end
      results[:columns] = columns
    elsif run_query_config&.dig(:getColumnsForSchema)
      # Here each "query" is a table descriptor; run its view query (if any)
      # to derive column info, tolerating per-table failures.
      query_results = queries.map do |table|
        if table[:viewQuery].nil? || (!table[:isSelectStar] && !table[:customFieldInfo])
          table
        else
          limit = run_query_config[:limitBy] ? " limit #{run_query_config[:limitBy]}" : ""
          begin
            query_result = @target_connection.query("#{table[:viewQuery].gsub(/;/, '')} #{limit}")
            columns = query_result[:fields].map do |field|
              {
                fieldType: Schema.convert_type_to_postgres(field[:dataTypeID]),
                name: field[:name],
                displayName: field[:name],
                isVisible: true,
                field: field[:name]
              }
            end
            table.merge(columns: columns, rows: query_result[:rows])
          rescue StandardError => e
            table.merge(error: "Error fetching columns: #{e.message}")
          end
        end
      end

      results[:queryResults] = query_results

      if run_query_config&.dig(:fieldsToRemove)
        results[:queryResults] = query_results.map do |table|
          removed_columns = table[:columns]&.reject { |column| run_query_config[:fieldsToRemove]&.include?(column[:name]) }
          table.merge(columns: removed_columns)
        end
      end

    elsif run_query_config&.dig(:getTables)
      # Introspect schema directly via DatabaseHelper; early return — note the
      # shape differs from the other branches' { queryResults: ... } hash.
      schema_names = run_query_config[:schemaNames] || run_query_config[:schema]
      tables_info = DatabaseHelper.get_tables_by_schema_by_database(@target_connection.database_type, @target_connection.pool, schema_names)
      schema_info = DatabaseHelper.get_column_info_by_schema_by_database(@target_connection.database_type, @target_connection.pool, run_query_config[:schema], tables_info)
      return schema_info

    else
      # Plain execution path, with optional row limits appended.
      modified_queries = queries
      if run_query_config&.dig(:limitThousand)
        modified_queries = queries.map { |q| "#{q.gsub(/;/, '')} limit 1000;" }
      elsif run_query_config&.dig(:limitBy)
        modified_queries = queries.map { |q| "#{q.gsub(/;/, '')} limit #{run_query_config[:limitBy]};" }
      end

      query_results = modified_queries.map { |query| @target_connection.query(query) }
      results[:queryResults] = query_results

      if run_query_config&.dig(:fieldsToRemove)
        results[:queryResults] = query_results.map do |result|
          RunQueryProcesses.remove_fields(result, run_query_config[:fieldsToRemove])
        end
      end

      # NOTE(review): this branch rebinds +results+ to an Array, discarding the
      # { queryResults: ... } hash shape (and any fieldsToRemove filtering)
      # returned by every other path — confirm callers of convertDatatypes
      # expect a bare array here.
      if run_query_config&.dig(:convertDatatypes)
        results = query_results.map do |result|
          {
            fields: result[:fields].map do |field|
              field.merge(
                fieldType: Schema.convert_type_to_postgres(field[:dataTypeID]),
                isVisible: true,
                field: field[:name],
                displayName: field[:name],
                name: field[:name]
              )
            end,
            rows: result[:rows]
          }
        end
      end
    end

    results
  end

  # POSTs +payload+ as JSON to "#{base_url}/sdk/#{path}" with the bearer token.
  # Returns the parsed (symbolized) JSON body. Raises QuillAPIError on any
  # non-success status, unparseable body, or transport error.
  def post_quill(path, payload)
    uri = URI("#{@base_url}/sdk/#{path}")
    http = Net::HTTP.new(uri.host, uri.port)
    http.use_ssl = uri.scheme == 'https'
    request = Net::HTTP::Post.new(uri)
    request['Authorization'] = @config[:headers]["Authorization"]
    request['Content-Type'] = 'application/json'
    request.body = payload.to_json

    response = http.request(request)

    if !response.is_a?(Net::HTTPSuccess)
      # Error responses are expected to be JSON with an :error key; if not,
      # the JSON::ParserError rescue below converts the failure instead.
      body = JSON.parse(response.body, symbolize_names: true)
      raise QuillAPIError.new("#{body[:error]}")
    end

    JSON.parse(response.body, symbolize_names: true)
  rescue JSON::ParserError => e
    raise QuillAPIError.new("Invalid JSON response: #{e.message}")
  rescue => e
    # Re-wraps any StandardError (including the QuillAPIError raised above)
    # so callers only ever see QuillAPIError from this method.
    raise QuillAPIError.new(e.message)
  end

  # Alias kept for parity with SDKs in other languages; simply closes.
  def async_dispose
    close
  end

  # Releases the underlying database connection, if one was established.
  def close
    @target_connection&.close
  end
end
@@ -0,0 +1,29 @@
# Value object describing a single column in a table schema.
class TableSchemaInfo
  attr_accessor :field_type, :name, :display_name, :is_visible

  # Every attribute is a required keyword argument.
  def initialize(field_type:, name:, display_name:, is_visible:)
    @field_type   = field_type
    @name         = name
    @display_name = display_name
    @is_visible   = is_visible
  end
end
11
+
# Post-processing helpers used by Quill#run_queries.
module RunQueryProcesses
  # Removes the named fields from a query result.
  # Returns { fields:, rows: }. NOTE: row hashes are mutated in place — the
  # removed keys are deleted from the caller's row objects.
  def self.remove_fields(query_results, fields_to_remove)
    kept_fields = query_results[:fields].reject do |field|
      fields_to_remove.include?(field[:name])
    end
    stripped_rows = query_results[:rows].map do |row|
      fields_to_remove.each { |name| row.delete(name) }
      row
    end
    { fields: kept_fields, rows: stripped_rows }
  end

  # Runs each query on the target connection and returns one row array per
  # query, in order.
  def self.map_queries(queries, target_connection)
    queries.map { |sql| target_connection.query(sql)[:rows] }
  end
end
@@ -0,0 +1,8 @@
1
+ require_relative '../assets/pg_types'
2
+
# Translates wire-format type OIDs into Postgres type names via PG_TYPES.
module Schema
  # Returns the :typname for the PG_TYPES entry whose :oid equals
  # +data_type_id+, or 'varchar' when the OID is unknown.
  def self.convert_type_to_postgres(data_type_id)
    entry = PG_TYPES.find { |candidate| candidate[:oid] == data_type_id }
    entry && entry[:typname] ? entry[:typname] : 'varchar'
  end
end
@@ -0,0 +1,21 @@
# Helpers for interpreting the +tenants+ argument accepted by Quill#query.
# Tenants are either a flat list of string/numeric ids, or a single-element
# list holding a hash with string keys 'tenant_ids' / 'tenant_field'.
module TenantUtils
  # Returns the list of tenant ids for either accepted shape.
  # Raises RuntimeError for any other shape.
  def self.extract_tenant_ids(tenants)
    head = tenants[0]
    return tenants if head.is_a?(String) || head.is_a?(Numeric)
    return head['tenant_ids'] if head.is_a?(Hash) && head.key?('tenant_ids')

    raise 'Invalid format for tenants'
  end

  # Returns the tenant column: +dashboard_owner+ when tenants are plain ids,
  # otherwise the explicit 'tenant_field' entry. Raises RuntimeError for any
  # other shape.
  def self.extract_tenant_field(tenants, dashboard_owner)
    head = tenants[0]
    return dashboard_owner if head.is_a?(String) || head.is_a?(Numeric)
    return head['tenant_field'] if head.is_a?(Hash) && head.key?('tenant_field')

    raise 'Invalid format for tenants'
  end
end
metadata ADDED
@@ -0,0 +1,167 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: quill-sql
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.1.0
5
+ platform: ruby
6
+ authors:
7
+ - Shawn Magee, Albert Yan
8
+ - Sam Bishop
9
+ autorequire:
10
+ bindir: bin
11
+ cert_chain: []
12
+ date: 2025-01-14 00:00:00.000000000 Z
13
+ dependencies:
14
+ - !ruby/object:Gem::Dependency
15
+ name: json
16
+ requirement: !ruby/object:Gem::Requirement
17
+ requirements:
18
+ - - ">="
19
+ - !ruby/object:Gem::Version
20
+ version: '2.0'
21
+ type: :runtime
22
+ prerelease: false
23
+ version_requirements: !ruby/object:Gem::Requirement
24
+ requirements:
25
+ - - ">="
26
+ - !ruby/object:Gem::Version
27
+ version: '2.0'
28
+ - !ruby/object:Gem::Dependency
29
+ name: activesupport
30
+ requirement: !ruby/object:Gem::Requirement
31
+ requirements:
32
+ - - "~>"
33
+ - !ruby/object:Gem::Version
34
+ version: 7.0.0
35
+ type: :runtime
36
+ prerelease: false
37
+ version_requirements: !ruby/object:Gem::Requirement
38
+ requirements:
39
+ - - "~>"
40
+ - !ruby/object:Gem::Version
41
+ version: 7.0.0
42
+ - !ruby/object:Gem::Dependency
43
+ name: click_house
44
+ requirement: !ruby/object:Gem::Requirement
45
+ requirements:
46
+ - - "~>"
47
+ - !ruby/object:Gem::Version
48
+ version: 2.1.2
49
+ type: :runtime
50
+ prerelease: false
51
+ version_requirements: !ruby/object:Gem::Requirement
52
+ requirements:
53
+ - - "~>"
54
+ - !ruby/object:Gem::Version
55
+ version: 2.1.2
56
+ - !ruby/object:Gem::Dependency
57
+ name: redis
58
+ requirement: !ruby/object:Gem::Requirement
59
+ requirements:
60
+ - - ">="
61
+ - !ruby/object:Gem::Version
62
+ version: '0'
63
+ type: :runtime
64
+ prerelease: false
65
+ version_requirements: !ruby/object:Gem::Requirement
66
+ requirements:
67
+ - - ">="
68
+ - !ruby/object:Gem::Version
69
+ version: '0'
70
+ - !ruby/object:Gem::Dependency
71
+ name: dotenv
72
+ requirement: !ruby/object:Gem::Requirement
73
+ requirements:
74
+ - - ">="
75
+ - !ruby/object:Gem::Version
76
+ version: '0'
77
+ type: :runtime
78
+ prerelease: false
79
+ version_requirements: !ruby/object:Gem::Requirement
80
+ requirements:
81
+ - - ">="
82
+ - !ruby/object:Gem::Version
83
+ version: '0'
84
+ - !ruby/object:Gem::Dependency
85
+ name: rubocop
86
+ requirement: !ruby/object:Gem::Requirement
87
+ requirements:
88
+ - - ">="
89
+ - !ruby/object:Gem::Version
90
+ version: '0'
91
+ type: :development
92
+ prerelease: false
93
+ version_requirements: !ruby/object:Gem::Requirement
94
+ requirements:
95
+ - - ">="
96
+ - !ruby/object:Gem::Version
97
+ version: '0'
98
+ - !ruby/object:Gem::Dependency
99
+ name: sinatra
100
+ requirement: !ruby/object:Gem::Requirement
101
+ requirements:
102
+ - - ">="
103
+ - !ruby/object:Gem::Version
104
+ version: '0'
105
+ type: :development
106
+ prerelease: false
107
+ version_requirements: !ruby/object:Gem::Requirement
108
+ requirements:
109
+ - - ">="
110
+ - !ruby/object:Gem::Version
111
+ version: '0'
112
+ - !ruby/object:Gem::Dependency
113
+ name: sinatra-cors
114
+ requirement: !ruby/object:Gem::Requirement
115
+ requirements:
116
+ - - ">="
117
+ - !ruby/object:Gem::Version
118
+ version: '0'
119
+ type: :development
120
+ prerelease: false
121
+ version_requirements: !ruby/object:Gem::Requirement
122
+ requirements:
123
+ - - ">="
124
+ - !ruby/object:Gem::Version
125
+ version: '0'
126
+ description: Quill sdk for Ruby. See quill.co for more information.
127
+ email:
128
+ - shawn@quill.co
129
+ executables: []
130
+ extensions: []
131
+ extra_rdoc_files: []
132
+ files:
133
+ - LICENSE
134
+ - README.md
135
+ - lib/assets/pg_types.rb
136
+ - lib/db/cached_connection.rb
137
+ - lib/db/clickhouse.rb
138
+ - lib/db/db_helper.rb
139
+ - lib/models/filters.rb
140
+ - lib/quill-sql.rb
141
+ - lib/utils/run_query_processes.rb
142
+ - lib/utils/schema.rb
143
+ - lib/utils/tenants.rb
144
+ homepage: https://github.com/quill-sql/quill-ruby
145
+ licenses:
146
+ - MIT
147
+ metadata: {}
148
+ post_install_message:
149
+ rdoc_options: []
150
+ require_paths:
151
+ - lib
152
+ required_ruby_version: !ruby/object:Gem::Requirement
153
+ requirements:
154
+ - - ">="
155
+ - !ruby/object:Gem::Version
156
+ version: 2.7.0
157
+ required_rubygems_version: !ruby/object:Gem::Requirement
158
+ requirements:
159
+ - - ">="
160
+ - !ruby/object:Gem::Version
161
+ version: '0'
162
+ requirements: []
163
+ rubygems_version: 3.0.3.1
164
+ signing_key:
165
+ specification_version: 4
166
+ summary: Quill
167
+ test_files: []