trino-client 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. checksums.yaml +7 -0
  2. data/.github/CODEOWNERS +1 -0
  3. data/.github/PULL_REQUEST_TEMPLATE.md +18 -0
  4. data/.github/workflows/ruby.yml +30 -0
  5. data/.gitignore +4 -0
  6. data/ChangeLog.md +168 -0
  7. data/Gemfile +7 -0
  8. data/LICENSE +202 -0
  9. data/README.md +131 -0
  10. data/Rakefile +45 -0
  11. data/lib/trino-client.rb +1 -0
  12. data/lib/trino/client.rb +23 -0
  13. data/lib/trino/client/client.rb +78 -0
  14. data/lib/trino/client/errors.rb +46 -0
  15. data/lib/trino/client/faraday_client.rb +242 -0
  16. data/lib/trino/client/model_versions/0.149.rb +1683 -0
  17. data/lib/trino/client/model_versions/0.153.rb +1719 -0
  18. data/lib/trino/client/model_versions/0.173.rb +1685 -0
  19. data/lib/trino/client/model_versions/0.178.rb +1964 -0
  20. data/lib/trino/client/model_versions/0.205.rb +2169 -0
  21. data/lib/trino/client/model_versions/303.rb +2574 -0
  22. data/lib/trino/client/model_versions/316.rb +2595 -0
  23. data/lib/trino/client/model_versions/351.rb +2726 -0
  24. data/lib/trino/client/models.rb +38 -0
  25. data/lib/trino/client/query.rb +144 -0
  26. data/lib/trino/client/statement_client.rb +279 -0
  27. data/lib/trino/client/version.rb +20 -0
  28. data/modelgen/model_versions.rb +280 -0
  29. data/modelgen/modelgen.rb +119 -0
  30. data/modelgen/models.rb +31 -0
  31. data/modelgen/trino_models.rb +270 -0
  32. data/release.rb +56 -0
  33. data/spec/basic_query_spec.rb +82 -0
  34. data/spec/client_spec.rb +75 -0
  35. data/spec/gzip_spec.rb +40 -0
  36. data/spec/model_spec.rb +35 -0
  37. data/spec/spec_helper.rb +42 -0
  38. data/spec/statement_client_spec.rb +637 -0
  39. data/spec/tpch/q01.sql +21 -0
  40. data/spec/tpch/q02.sql +43 -0
  41. data/spec/tpch_query_spec.rb +41 -0
  42. data/trino-client.gemspec +31 -0
  43. metadata +211 -0
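
The gem's public surface lives in lib/trino/client/client.rb and query.rb; the model_versions files below are the generated decoders those classes rely on. For orientation, a minimal usage sketch in the style of this package's README (the option names server, catalog, schema, user and the run method are indicative of that API, not shown in this diff):

    require 'trino-client'

    # Connect to a Trino coordinator; options follow the README-style API.
    client = Trino::Client.new(
      server:  'localhost:8080',
      catalog: 'hive',
      schema:  'default',
      user:    'example-user'
    )

    # run waits for the query to finish and returns column metadata plus all rows.
    columns, rows = client.run('SELECT nationkey, name FROM tpch.tiny.nation')
    rows.each {|row| p row }
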
data/lib/trino/client/model_versions/0.153.rb
@@ -0,0 +1,1719 @@
1
+ #
2
+ # Trino client for Ruby
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ module Trino::Client::ModelVersions
17
+
18
+ ####
19
+ ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command.
20
+ ## You should not edit this file directly. To modify the class definitions, edit
21
+ ## modelgen/model_versions.rb file and run "rake modelgen:all".
22
+ ##
23
+
24
+ module V0_153
25
+ class Base < Struct
26
+ class << self
27
+ alias_method :new_struct, :new
28
+
29
+ def new(*args)
30
+ new_struct(*args) do
31
+ # make it immutable
32
+ undef_method :"[]="
33
+ members.each do |m|
34
+ undef_method :"#{m}="
35
+ end
36
+
37
+ # replace constructor to receive hash instead of array
38
+ alias_method :initialize_struct, :initialize
39
+
40
+ def initialize(params={})
41
+ initialize_struct(*members.map {|m| params[m] })
42
+ end
43
+ end
44
+ end
45
+ end
46
+ end
47
+
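
A quick sketch of what the Base wrapper above gives every generated model, assuming the gem is loaded: construction from a symbol-keyed hash and immutability (the setters and []= are undefined). ErrorLocation, defined further down in this file, serves only as the illustration:

    loc = Trino::Client::ModelVersions::V0_153::ErrorLocation.new(
      line_number: 3, column_number: 14)

    loc.line_number       # => 3
    loc.column_number     # => 14
    loc.line_number = 5   # raises NoMethodError: the struct is immutable
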
48
+ class StageId < String
49
+ def initialize(str)
50
+ super
51
+ splitted = split('.', 2)
52
+ @query_id = splitted[0]
53
+ @id = splitted[1]
54
+ end
55
+
56
+ attr_reader :query_id, :id
57
+ end
58
+
59
+ class TaskId < String
60
+ def initialize(str)
61
+ super
62
+ splitted = split('.', 3)
63
+ @stage_id = StageId.new("#{splitted[0]}.#{splitted[1]}")
64
+ @query_id = @stage_id.query_id
65
+ @id = splitted[2]
66
+ end
67
+
68
+ attr_reader :query_id, :stage_id, :id
69
+ end
70
+
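
StageId and TaskId are plain strings that also expose their parsed components. A small worked example (the identifier is made up):

    models = Trino::Client::ModelVersions::V0_153

    task = models::TaskId.new('20210101_000000_00001_abcde.3.17')
    task.query_id   # => "20210101_000000_00001_abcde"
    task.stage_id   # => "20210101_000000_00001_abcde.3" (a StageId)
    task.id         # => "17"
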
71
+ class ConnectorSession < Hash
72
+ def initialize(hash)
73
+ super()
74
+ merge!(hash)
75
+ end
76
+ end
77
+
78
+ module PlanNode
79
+ def self.decode(hash)
80
+ unless hash.is_a?(Hash)
81
+ raise TypeError, "Can't convert #{hash.class} to Hash"
82
+ end
83
+ model_class = case hash["@type"]
84
+ when "output" then OutputNode
85
+ when "project" then ProjectNode
86
+ when "tablescan" then TableScanNode
87
+ when "values" then ValuesNode
88
+ when "aggregation" then AggregationNode
89
+ when "markDistinct" then MarkDistinctNode
90
+ when "filter" then FilterNode
91
+ when "window" then WindowNode
92
+ when "rowNumber" then RowNumberNode
93
+ when "topnRowNumber" then TopNRowNumberNode
94
+ when "limit" then LimitNode
95
+ when "distinctlimit" then DistinctLimitNode
96
+ when "topn" then TopNNode
97
+ when "sample" then SampleNode
98
+ when "sort" then SortNode
99
+ when "remoteSource" then RemoteSourceNode
100
+ when "join" then JoinNode
101
+ when "semijoin" then SemiJoinNode
102
+ when "indexjoin" then IndexJoinNode
103
+ when "indexsource" then IndexSourceNode
104
+ when "tablewriter" then TableWriterNode
105
+ when "delete" then DeleteNode
106
+ when "metadatadelete" then MetadataDeleteNode
107
+ when "tablecommit" then TableFinishNode
108
+ when "unnest" then UnnestNode
109
+ when "exchange" then ExchangeNode
110
+ when "union" then UnionNode
111
+ when "intersect" then IntersectNode
112
+ when "scalar" then EnforceSingleRowNode
113
+ when "groupid" then GroupIdNode
114
+ when "explainAnalyze" then ExplainAnalyzeNode
115
+ when "apply" then ApplyNode
116
+ end
117
+ if model_class
118
+ node = model_class.decode(hash)
119
+ class << node
120
+ attr_accessor :plan_node_type
121
+ end
122
+ node.plan_node_type = hash['@type']
123
+ node
124
+ end
125
+ end
126
+ end
127
+
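
PlanNode.decode is the polymorphic entry point for the plan tree: it inspects the JSON "@type" tag, delegates to the matching node class, and records the tag on the result. A minimal sketch:

    models = Trino::Client::ModelVersions::V0_153

    node = models::PlanNode.decode(
      "@type" => "limit", "id" => "4", "count" => 10, "partial" => false)

    node.count            # => 10
    node.partial          # => false
    node.plan_node_type   # => "limit" (the tag attached by PlanNode.decode)
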
128
+ # io.airlift.stats.Distribution.DistributionSnapshot
129
+ class << DistributionSnapshot =
130
+ Base.new(:max_error, :count, :total, :p01, :p05, :p10, :p25, :p50, :p75, :p90, :p95, :p99, :min, :max)
131
+ def decode(hash)
132
+ unless hash.is_a?(Hash)
133
+ raise TypeError, "Can't convert #{hash.class} to Hash"
134
+ end
135
+ obj = allocate
136
+ obj.send(:initialize_struct,
137
+ hash["maxError"],
138
+ hash["count"],
139
+ hash["total"],
140
+ hash["p01"],
141
+ hash["p05"],
142
+ hash["p10"],
143
+ hash["p25"],
144
+ hash["p50"],
145
+ hash["p75"],
146
+ hash["p90"],
147
+ hash["p95"],
148
+ hash["p99"],
149
+ hash["min"],
150
+ hash["max"],
151
+ )
152
+ obj
153
+ end
154
+ end
155
+
156
+ # This is a hybrid of JoinNode.EquiJoinClause and IndexJoinNode.EquiJoinClause
157
+ class << EquiJoinClause =
158
+ Base.new(:left, :right, :probe, :index)
159
+ def decode(hash)
160
+ unless hash.is_a?(Hash)
161
+ raise TypeError, "Can't convert #{hash.class} to Hash"
162
+ end
163
+ obj = allocate
164
+ obj.send(:initialize_struct,
165
+ hash["left"],
166
+ hash["right"],
167
+ hash["probe"],
168
+ hash["index"],
169
+ )
170
+ obj
171
+ end
172
+ end
173
+
174
+ class << WriterTarget =
175
+ Base.new(:type, :handle)
176
+ def decode(hash)
177
+ unless hash.is_a?(Hash)
178
+ raise TypeError, "Can't convert #{hash.class} to Hash"
179
+ end
180
+ obj = allocate
181
+ model_class = case hash["@type"]
182
+ when "CreateHandle" then OutputTableHandle
183
+ when "InsertHandle" then InsertTableHandle
184
+ when "DeleteHandle" then TableHandle
185
+ end
186
+ obj.send(:initialize_struct,
187
+ hash["@type"],
188
+ model_class.decode(hash['handle'])
189
+ )
190
+ obj
191
+ end
192
+ end
193
+
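
WriterTarget performs the same tag-based dispatch for its nested handle, so a "CreateHandle" payload carries a decoded OutputTableHandle. A sketch with placeholder handle contents:

    models = Trino::Client::ModelVersions::V0_153

    target = models::WriterTarget.decode(
      "@type"  => "CreateHandle",
      "handle" => {"connectorId" => "hive",
                   "transactionHandle" => {}, "connectorHandle" => {}})

    target.type                  # => "CreateHandle"
    target.handle.connector_id   # => "hive" (an OutputTableHandle)
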
194
+ class << DeleteHandle =
195
+ Base.new(:handle)
196
+ def decode(hash)
197
+ unless hash.is_a?(Hash)
198
+ raise TypeError, "Can't convert #{hash.class} to Hash"
199
+ end
200
+ obj = allocate
201
+ obj.send(:initialize_struct,
202
+ TableHandle.decode(hash['handle'])
203
+ )
204
+ obj
205
+ end
206
+ end
207
+
208
+ # Inner classes
209
+ class << Specification =
210
+ Base.new(:partition_by, :order_by, :orderings, :frame, :pages_added)
211
+ def decode(hash)
212
+ unless hash.is_a?(Hash)
213
+ raise TypeError, "Can't convert #{hash.class} to Hash"
214
+ end
215
+ obj = allocate
216
+ obj.send(:initialize_struct,
217
+ hash["partitionBy"],
218
+ hash["orderBy"],
219
+ hash["orderings"],
220
+ hash["frame"],
221
+ )
222
+ obj
223
+ end
224
+ end
225
+
226
+ class << ArgumentBinding =
227
+ Base.new(:column, :constant)
228
+ def decode(hash)
229
+ unless hash.is_a?(Hash)
230
+ raise TypeError, "Can't convert #{hash.class} to Hash"
231
+ end
232
+ obj = allocate
233
+ obj.send(:initialize_struct,
234
+ hash["column"],
235
+ hash["constant"]
236
+ )
237
+ obj
238
+ end
239
+ end
240
+
241
+ ##
242
+ # Those model classes are automatically generated
243
+ #
244
+
245
+ class << AggregationNode =
246
+ Base.new(:id, :source, :aggregations, :functions, :masks, :grouping_sets, :step, :sample_weight, :confidence, :hash_symbol, :group_id_symbol)
247
+ def decode(hash)
248
+ unless hash.is_a?(Hash)
249
+ raise TypeError, "Can't convert #{hash.class} to Hash"
250
+ end
251
+ obj = allocate
252
+ obj.send(:initialize_struct,
253
+ hash["id"],
254
+ hash["source"] && PlanNode.decode(hash["source"]),
255
+ hash["aggregations"],
256
+ hash["functions"] && Hash[hash["functions"].to_a.map! {|k,v| [k, Signature.decode(v)] }],
257
+ hash["masks"],
258
+ hash["groupingSets"],
259
+ hash["step"] && hash["step"].downcase.to_sym,
260
+ hash["sampleWeight"],
261
+ hash["confidence"],
262
+ hash["hashSymbol"],
263
+ hash["groupIdSymbol"],
264
+ )
265
+ obj
266
+ end
267
+ end
268
+
269
+ class << ApplyNode =
270
+ Base.new(:id, :input, :subquery, :correlation)
271
+ def decode(hash)
272
+ unless hash.is_a?(Hash)
273
+ raise TypeError, "Can't convert #{hash.class} to Hash"
274
+ end
275
+ obj = allocate
276
+ obj.send(:initialize_struct,
277
+ hash["id"],
278
+ hash["input"] && PlanNode.decode(hash["input"]),
279
+ hash["subquery"] && PlanNode.decode(hash["subquery"]),
280
+ hash["correlation"],
281
+ )
282
+ obj
283
+ end
284
+ end
285
+
286
+ class << BufferInfo =
287
+ Base.new(:buffer_id, :finished, :buffered_pages, :pages_sent, :page_buffer_info)
288
+ def decode(hash)
289
+ unless hash.is_a?(Hash)
290
+ raise TypeError, "Can't convert #{hash.class} to Hash"
291
+ end
292
+ obj = allocate
293
+ obj.send(:initialize_struct,
294
+ hash["bufferId"],
295
+ hash["finished"],
296
+ hash["bufferedPages"],
297
+ hash["pagesSent"],
298
+ hash["pageBufferInfo"] && PageBufferInfo.decode(hash["pageBufferInfo"]),
299
+ )
300
+ obj
301
+ end
302
+ end
303
+
304
+ class << ClientColumn =
305
+ Base.new(:name, :type, :type_signature)
306
+ def decode(hash)
307
+ unless hash.is_a?(Hash)
308
+ raise TypeError, "Can't convert #{hash.class} to Hash"
309
+ end
310
+ obj = allocate
311
+ obj.send(:initialize_struct,
312
+ hash["name"],
313
+ hash["type"],
314
+ hash["typeSignature"] && ClientTypeSignature.decode(hash["typeSignature"]),
315
+ )
316
+ obj
317
+ end
318
+ end
319
+
320
+ class << ClientStageStats =
321
+ Base.new(:stage_id, :state, :done, :nodes, :total_splits, :queued_splits, :running_splits, :completed_splits, :user_time_millis, :cpu_time_millis, :wall_time_millis, :processed_rows, :processed_bytes, :sub_stages)
322
+ def decode(hash)
323
+ unless hash.is_a?(Hash)
324
+ raise TypeError, "Can't convert #{hash.class} to Hash"
325
+ end
326
+ obj = allocate
327
+ obj.send(:initialize_struct,
328
+ hash["stageId"],
329
+ hash["state"],
330
+ hash["done"],
331
+ hash["nodes"],
332
+ hash["totalSplits"],
333
+ hash["queuedSplits"],
334
+ hash["runningSplits"],
335
+ hash["completedSplits"],
336
+ hash["userTimeMillis"],
337
+ hash["cpuTimeMillis"],
338
+ hash["wallTimeMillis"],
339
+ hash["processedRows"],
340
+ hash["processedBytes"],
341
+ hash["subStages"] && hash["subStages"].map {|h| ClientStageStats.decode(h) },
342
+ )
343
+ obj
344
+ end
345
+ end
346
+
347
+ class << ClientTypeSignature =
348
+ Base.new(:raw_type, :type_arguments, :literal_arguments, :arguments)
349
+ def decode(hash)
350
+ unless hash.is_a?(Hash)
351
+ raise TypeError, "Can't convert #{hash.class} to Hash"
352
+ end
353
+ obj = allocate
354
+ obj.send(:initialize_struct,
355
+ hash["rawType"],
356
+ hash["typeArguments"] && hash["typeArguments"].map {|h| ClientTypeSignature.decode(h) },
357
+ hash["literalArguments"],
358
+ hash["arguments"] && hash["arguments"].map {|h| ClientTypeSignatureParameter.decode(h) },
359
+ )
360
+ obj
361
+ end
362
+ end
363
+
364
+ class << ClientTypeSignatureParameter =
365
+ Base.new(:kind, :value)
366
+ def decode(hash)
367
+ unless hash.is_a?(Hash)
368
+ raise TypeError, "Can't convert #{hash.class} to Hash"
369
+ end
370
+ obj = allocate
371
+ obj.send(:initialize_struct,
372
+ hash["kind"] && hash["kind"].downcase.to_sym,
373
+ hash["value"],
374
+ )
375
+ obj
376
+ end
377
+ end
378
+
379
+ class << Column =
380
+ Base.new(:name, :type)
381
+ def decode(hash)
382
+ unless hash.is_a?(Hash)
383
+ raise TypeError, "Can't convert #{hash.class} to Hash"
384
+ end
385
+ obj = allocate
386
+ obj.send(:initialize_struct,
387
+ hash["name"],
388
+ hash["type"],
389
+ )
390
+ obj
391
+ end
392
+ end
393
+
394
+ class << DeleteNode =
395
+ Base.new(:id, :source, :target, :row_id, :outputs)
396
+ def decode(hash)
397
+ unless hash.is_a?(Hash)
398
+ raise TypeError, "Can't convert #{hash.class} to Hash"
399
+ end
400
+ obj = allocate
401
+ obj.send(:initialize_struct,
402
+ hash["id"],
403
+ hash["source"] && PlanNode.decode(hash["source"]),
404
+ hash["target"] && DeleteHandle.decode(hash["target"]),
405
+ hash["rowId"],
406
+ hash["outputs"],
407
+ )
408
+ obj
409
+ end
410
+ end
411
+
412
+ class << DistinctLimitNode =
413
+ Base.new(:id, :source, :limit, :partial, :hash_symbol)
414
+ def decode(hash)
415
+ unless hash.is_a?(Hash)
416
+ raise TypeError, "Can't convert #{hash.class} to Hash"
417
+ end
418
+ obj = allocate
419
+ obj.send(:initialize_struct,
420
+ hash["id"],
421
+ hash["source"] && PlanNode.decode(hash["source"]),
422
+ hash["limit"],
423
+ hash["partial"],
424
+ hash["hashSymbol"],
425
+ )
426
+ obj
427
+ end
428
+ end
429
+
430
+ class << DriverStats =
431
+ Base.new(:create_time, :start_time, :end_time, :queued_time, :elapsed_time, :memory_reservation, :system_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_user_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :raw_input_data_size, :raw_input_positions, :raw_input_read_time, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions, :operator_stats)
432
+ def decode(hash)
433
+ unless hash.is_a?(Hash)
434
+ raise TypeError, "Can't convert #{hash.class} to Hash"
435
+ end
436
+ obj = allocate
437
+ obj.send(:initialize_struct,
438
+ hash["createTime"],
439
+ hash["startTime"],
440
+ hash["endTime"],
441
+ hash["queuedTime"],
442
+ hash["elapsedTime"],
443
+ hash["memoryReservation"],
444
+ hash["systemMemoryReservation"],
445
+ hash["totalScheduledTime"],
446
+ hash["totalCpuTime"],
447
+ hash["totalUserTime"],
448
+ hash["totalBlockedTime"],
449
+ hash["fullyBlocked"],
450
+ hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym },
451
+ hash["rawInputDataSize"],
452
+ hash["rawInputPositions"],
453
+ hash["rawInputReadTime"],
454
+ hash["processedInputDataSize"],
455
+ hash["processedInputPositions"],
456
+ hash["outputDataSize"],
457
+ hash["outputPositions"],
458
+ hash["operatorStats"] && hash["operatorStats"].map {|h| OperatorStats.decode(h) },
459
+ )
460
+ obj
461
+ end
462
+ end
463
+
464
+ class << EnforceSingleRowNode =
465
+ Base.new(:id, :source)
466
+ def decode(hash)
467
+ unless hash.is_a?(Hash)
468
+ raise TypeError, "Can't convert #{hash.class} to Hash"
469
+ end
470
+ obj = allocate
471
+ obj.send(:initialize_struct,
472
+ hash["id"],
473
+ hash["source"] && PlanNode.decode(hash["source"]),
474
+ )
475
+ obj
476
+ end
477
+ end
478
+
479
+ class << ErrorCode =
480
+ Base.new(:code, :name, :type)
481
+ def decode(hash)
482
+ unless hash.is_a?(Hash)
483
+ raise TypeError, "Can't convert #{hash.class} to Hash"
484
+ end
485
+ obj = allocate
486
+ obj.send(:initialize_struct,
487
+ hash["code"],
488
+ hash["name"],
489
+ hash["type"] && hash["type"].downcase.to_sym,
490
+ )
491
+ obj
492
+ end
493
+ end
494
+
495
+ class << ErrorLocation =
496
+ Base.new(:line_number, :column_number)
497
+ def decode(hash)
498
+ unless hash.is_a?(Hash)
499
+ raise TypeError, "Can't convert #{hash.class} to Hash"
500
+ end
501
+ obj = allocate
502
+ obj.send(:initialize_struct,
503
+ hash["lineNumber"],
504
+ hash["columnNumber"],
505
+ )
506
+ obj
507
+ end
508
+ end
509
+
510
+ class << ExchangeNode =
511
+ Base.new(:id, :type, :scope, :partitioning_scheme, :sources, :inputs)
512
+ def decode(hash)
513
+ unless hash.is_a?(Hash)
514
+ raise TypeError, "Can't convert #{hash.class} to Hash"
515
+ end
516
+ obj = allocate
517
+ obj.send(:initialize_struct,
518
+ hash["id"],
519
+ hash["type"],
520
+ hash["scope"] && hash["scope"].downcase.to_sym,
521
+ hash["partitioningScheme"] && PartitioningScheme.decode(hash["partitioningScheme"]),
522
+ hash["sources"] && hash["sources"].map {|h| PlanNode.decode(h) },
523
+ hash["inputs"],
524
+ )
525
+ obj
526
+ end
527
+ end
528
+
529
+ class << ExecutionFailureInfo =
530
+ Base.new(:type, :message, :cause, :suppressed, :stack, :error_location, :error_code)
531
+ def decode(hash)
532
+ unless hash.is_a?(Hash)
533
+ raise TypeError, "Can't convert #{hash.class} to Hash"
534
+ end
535
+ obj = allocate
536
+ obj.send(:initialize_struct,
537
+ hash["type"],
538
+ hash["message"],
539
+ hash["cause"] && ExecutionFailureInfo.decode(hash["cause"]),
540
+ hash["suppressed"] && hash["suppressed"].map {|h| ExecutionFailureInfo.decode(h) },
541
+ hash["stack"],
542
+ hash["errorLocation"] && ErrorLocation.decode(hash["errorLocation"]),
543
+ hash["errorCode"] && ErrorCode.decode(hash["errorCode"]),
544
+ )
545
+ obj
546
+ end
547
+ end
548
+
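
ExecutionFailureInfo decodes its "cause" chain recursively, so nested exception traces arrive as linked model objects. A sketch with made-up values:

    models = Trino::Client::ModelVersions::V0_153

    failure = models::ExecutionFailureInfo.decode(
      "type"      => "java.lang.RuntimeException",
      "message"   => "outer",
      "cause"     => {"type" => "java.io.IOException", "message" => "inner"},
      "errorCode" => {"code" => 65536, "name" => "GENERIC_INTERNAL_ERROR",
                      "type" => "INTERNAL_ERROR"})

    failure.cause.message     # => "inner"
    failure.error_code.type   # => :internal_error
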
549
+ class << ExplainAnalyzeNode =
550
+ Base.new(:id, :source, :output_symbol)
551
+ def decode(hash)
552
+ unless hash.is_a?(Hash)
553
+ raise TypeError, "Can't convert #{hash.class} to Hash"
554
+ end
555
+ obj = allocate
556
+ obj.send(:initialize_struct,
557
+ hash["id"],
558
+ hash["source"] && PlanNode.decode(hash["source"]),
559
+ hash["outputSymbol"],
560
+ )
561
+ obj
562
+ end
563
+ end
564
+
565
+ class << FailureInfo =
566
+ Base.new(:type, :message, :cause, :suppressed, :stack, :error_location)
567
+ def decode(hash)
568
+ unless hash.is_a?(Hash)
569
+ raise TypeError, "Can't convert #{hash.class} to Hash"
570
+ end
571
+ obj = allocate
572
+ obj.send(:initialize_struct,
573
+ hash["type"],
574
+ hash["message"],
575
+ hash["cause"] && FailureInfo.decode(hash["cause"]),
576
+ hash["suppressed"] && hash["suppressed"].map {|h| FailureInfo.decode(h) },
577
+ hash["stack"],
578
+ hash["errorLocation"] && ErrorLocation.decode(hash["errorLocation"]),
579
+ )
580
+ obj
581
+ end
582
+ end
583
+
584
+ class << FilterNode =
585
+ Base.new(:id, :source, :predicate)
586
+ def decode(hash)
587
+ unless hash.is_a?(Hash)
588
+ raise TypeError, "Can't convert #{hash.class} to Hash"
589
+ end
590
+ obj = allocate
591
+ obj.send(:initialize_struct,
592
+ hash["id"],
593
+ hash["source"] && PlanNode.decode(hash["source"]),
594
+ hash["predicate"],
595
+ )
596
+ obj
597
+ end
598
+ end
599
+
600
+ class << GroupIdNode =
601
+ Base.new(:id, :source, :grouping_sets, :identity_mappings, :group_id_symbol)
602
+ def decode(hash)
603
+ unless hash.is_a?(Hash)
604
+ raise TypeError, "Can't convert #{hash.class} to Hash"
605
+ end
606
+ obj = allocate
607
+ obj.send(:initialize_struct,
608
+ hash["id"],
609
+ hash["source"] && PlanNode.decode(hash["source"]),
610
+ hash["groupingSets"],
611
+ hash["identityMappings"],
612
+ hash["groupIdSymbol"],
613
+ )
614
+ obj
615
+ end
616
+ end
617
+
618
+ class << IndexHandle =
619
+ Base.new(:connector_id, :transaction_handle, :connector_handle)
620
+ def decode(hash)
621
+ unless hash.is_a?(Hash)
622
+ raise TypeError, "Can't convert #{hash.class} to Hash"
623
+ end
624
+ obj = allocate
625
+ obj.send(:initialize_struct,
626
+ hash["connectorId"],
627
+ hash["transactionHandle"],
628
+ hash["connectorHandle"],
629
+ )
630
+ obj
631
+ end
632
+ end
633
+
634
+ class << IndexJoinNode =
635
+ Base.new(:id, :type, :probe_source, :index_source, :criteria, :probe_hash_symbol, :index_hash_symbol)
636
+ def decode(hash)
637
+ unless hash.is_a?(Hash)
638
+ raise TypeError, "Can't convert #{hash.class} to Hash"
639
+ end
640
+ obj = allocate
641
+ obj.send(:initialize_struct,
642
+ hash["id"],
643
+ hash["type"],
644
+ hash["probeSource"] && PlanNode.decode(hash["probeSource"]),
645
+ hash["indexSource"] && PlanNode.decode(hash["indexSource"]),
646
+ hash["criteria"] && hash["criteria"].map {|h| EquiJoinClause.decode(h) },
647
+ hash["probeHashSymbol"],
648
+ hash["indexHashSymbol"],
649
+ )
650
+ obj
651
+ end
652
+ end
653
+
654
+ class << IndexSourceNode =
655
+ Base.new(:id, :index_handle, :table_handle, :table_layout, :lookup_symbols, :output_symbols, :assignments, :effective_tuple_domain)
656
+ def decode(hash)
657
+ unless hash.is_a?(Hash)
658
+ raise TypeError, "Can't convert #{hash.class} to Hash"
659
+ end
660
+ obj = allocate
661
+ obj.send(:initialize_struct,
662
+ hash["id"],
663
+ hash["indexHandle"] && IndexHandle.decode(hash["indexHandle"]),
664
+ hash["tableHandle"] && TableHandle.decode(hash["tableHandle"]),
665
+ hash["tableLayout"] && TableLayoutHandle.decode(hash["tableLayout"]),
666
+ hash["lookupSymbols"],
667
+ hash["outputSymbols"],
668
+ hash["assignments"],
669
+ hash["effectiveTupleDomain"],
670
+ )
671
+ obj
672
+ end
673
+ end
674
+
675
+ class << Input =
676
+ Base.new(:connector_id, :schema, :table, :connector_info, :columns)
677
+ def decode(hash)
678
+ unless hash.is_a?(Hash)
679
+ raise TypeError, "Can't convert #{hash.class} to Hash"
680
+ end
681
+ obj = allocate
682
+ obj.send(:initialize_struct,
683
+ hash["connectorId"],
684
+ hash["schema"],
685
+ hash["table"],
686
+ hash["connectorInfo"],
687
+ hash["columns"] && hash["columns"].map {|h| Column.decode(h) },
688
+ )
689
+ obj
690
+ end
691
+ end
692
+
693
+ class << InsertTableHandle =
694
+ Base.new(:connector_id, :transaction_handle, :connector_handle)
695
+ def decode(hash)
696
+ unless hash.is_a?(Hash)
697
+ raise TypeError, "Can't convert #{hash.class} to Hash"
698
+ end
699
+ obj = allocate
700
+ obj.send(:initialize_struct,
701
+ hash["connectorId"],
702
+ hash["transactionHandle"],
703
+ hash["connectorHandle"],
704
+ )
705
+ obj
706
+ end
707
+ end
708
+
709
+ class << IntersectNode =
710
+ Base.new(:id, :sources, :output_to_inputs, :outputs)
711
+ def decode(hash)
712
+ unless hash.is_a?(Hash)
713
+ raise TypeError, "Can't convert #{hash.class} to Hash"
714
+ end
715
+ obj = allocate
716
+ obj.send(:initialize_struct,
717
+ hash["id"],
718
+ hash["sources"] && hash["sources"].map {|h| PlanNode.decode(h) },
719
+ hash["outputToInputs"],
720
+ hash["outputs"],
721
+ )
722
+ obj
723
+ end
724
+ end
725
+
726
+ class << JoinNode =
727
+ Base.new(:id, :type, :left, :right, :criteria, :filter, :left_hash_symbol, :right_hash_symbol)
728
+ def decode(hash)
729
+ unless hash.is_a?(Hash)
730
+ raise TypeError, "Can't convert #{hash.class} to Hash"
731
+ end
732
+ obj = allocate
733
+ obj.send(:initialize_struct,
734
+ hash["id"],
735
+ hash["type"],
736
+ hash["left"] && PlanNode.decode(hash["left"]),
737
+ hash["right"] && PlanNode.decode(hash["right"]),
738
+ hash["criteria"] && hash["criteria"].map {|h| EquiJoinClause.decode(h) },
739
+ hash["filter"],
740
+ hash["leftHashSymbol"],
741
+ hash["rightHashSymbol"],
742
+ )
743
+ obj
744
+ end
745
+ end
746
+
747
+ class << LimitNode =
748
+ Base.new(:id, :source, :count, :partial)
749
+ def decode(hash)
750
+ unless hash.is_a?(Hash)
751
+ raise TypeError, "Can't convert #{hash.class} to Hash"
752
+ end
753
+ obj = allocate
754
+ obj.send(:initialize_struct,
755
+ hash["id"],
756
+ hash["source"] && PlanNode.decode(hash["source"]),
757
+ hash["count"],
758
+ hash["partial"],
759
+ )
760
+ obj
761
+ end
762
+ end
763
+
764
+ class << LongVariableConstraint =
765
+ Base.new(:name, :expression)
766
+ def decode(hash)
767
+ unless hash.is_a?(Hash)
768
+ raise TypeError, "Can't convert #{hash.class} to Hash"
769
+ end
770
+ obj = allocate
771
+ obj.send(:initialize_struct,
772
+ hash["name"],
773
+ hash["expression"],
774
+ )
775
+ obj
776
+ end
777
+ end
778
+
779
+ class << MarkDistinctNode =
780
+ Base.new(:id, :source, :marker_symbol, :distinct_symbols, :hash_symbol)
781
+ def decode(hash)
782
+ unless hash.is_a?(Hash)
783
+ raise TypeError, "Can't convert #{hash.class} to Hash"
784
+ end
785
+ obj = allocate
786
+ obj.send(:initialize_struct,
787
+ hash["id"],
788
+ hash["source"] && PlanNode.decode(hash["source"]),
789
+ hash["markerSymbol"],
790
+ hash["distinctSymbols"],
791
+ hash["hashSymbol"],
792
+ )
793
+ obj
794
+ end
795
+ end
796
+
797
+ class << MetadataDeleteNode =
798
+ Base.new(:id, :target, :output, :table_layout)
799
+ def decode(hash)
800
+ unless hash.is_a?(Hash)
801
+ raise TypeError, "Can't convert #{hash.class} to Hash"
802
+ end
803
+ obj = allocate
804
+ obj.send(:initialize_struct,
805
+ hash["id"],
806
+ hash["target"] && DeleteHandle.decode(hash["target"]),
807
+ hash["output"],
808
+ hash["tableLayout"] && TableLayoutHandle.decode(hash["tableLayout"]),
809
+ )
810
+ obj
811
+ end
812
+ end
813
+
814
+ class << OperatorStats =
815
+ Base.new(:operator_id, :plan_node_id, :operator_type, :add_input_calls, :add_input_wall, :add_input_cpu, :add_input_user, :input_data_size, :input_positions, :get_output_calls, :get_output_wall, :get_output_cpu, :get_output_user, :output_data_size, :output_positions, :blocked_wall, :finish_calls, :finish_wall, :finish_cpu, :finish_user, :memory_reservation, :system_memory_reservation, :blocked_reason, :info)
816
+ def decode(hash)
817
+ unless hash.is_a?(Hash)
818
+ raise TypeError, "Can't convert #{hash.class} to Hash"
819
+ end
820
+ obj = allocate
821
+ obj.send(:initialize_struct,
822
+ hash["operatorId"],
823
+ hash["planNodeId"],
824
+ hash["operatorType"],
825
+ hash["addInputCalls"],
826
+ hash["addInputWall"],
827
+ hash["addInputCpu"],
828
+ hash["addInputUser"],
829
+ hash["inputDataSize"],
830
+ hash["inputPositions"],
831
+ hash["getOutputCalls"],
832
+ hash["getOutputWall"],
833
+ hash["getOutputCpu"],
834
+ hash["getOutputUser"],
835
+ hash["outputDataSize"],
836
+ hash["outputPositions"],
837
+ hash["blockedWall"],
838
+ hash["finishCalls"],
839
+ hash["finishWall"],
840
+ hash["finishCpu"],
841
+ hash["finishUser"],
842
+ hash["memoryReservation"],
843
+ hash["systemMemoryReservation"],
844
+ hash["blockedReason"] && hash["blockedReason"].downcase.to_sym,
845
+ hash["info"],
846
+ )
847
+ obj
848
+ end
849
+ end
850
+
851
+ class << Output =
852
+ Base.new(:connector_id, :schema, :table)
853
+ def decode(hash)
854
+ unless hash.is_a?(Hash)
855
+ raise TypeError, "Can't convert #{hash.class} to Hash"
856
+ end
857
+ obj = allocate
858
+ obj.send(:initialize_struct,
859
+ hash["connectorId"],
860
+ hash["schema"],
861
+ hash["table"],
862
+ )
863
+ obj
864
+ end
865
+ end
866
+
867
+ class << OutputBufferInfo =
868
+ Base.new(:type, :state, :can_add_buffers, :can_add_pages, :total_buffered_bytes, :total_buffered_pages, :total_rows_sent, :total_pages_sent, :buffers)
869
+ def decode(hash)
870
+ unless hash.is_a?(Hash)
871
+ raise TypeError, "Can't convert #{hash.class} to Hash"
872
+ end
873
+ obj = allocate
874
+ obj.send(:initialize_struct,
875
+ hash["type"],
876
+ hash["state"] && hash["state"].downcase.to_sym,
877
+ hash["canAddBuffers"],
878
+ hash["canAddPages"],
879
+ hash["totalBufferedBytes"],
880
+ hash["totalBufferedPages"],
881
+ hash["totalRowsSent"],
882
+ hash["totalPagesSent"],
883
+ hash["buffers"] && hash["buffers"].map {|h| BufferInfo.decode(h) },
884
+ )
885
+ obj
886
+ end
887
+ end
888
+
889
+ class << OutputNode =
890
+ Base.new(:id, :source, :columns, :outputs)
891
+ def decode(hash)
892
+ unless hash.is_a?(Hash)
893
+ raise TypeError, "Can't convert #{hash.class} to Hash"
894
+ end
895
+ obj = allocate
896
+ obj.send(:initialize_struct,
897
+ hash["id"],
898
+ hash["source"] && PlanNode.decode(hash["source"]),
899
+ hash["columns"],
900
+ hash["outputs"],
901
+ )
902
+ obj
903
+ end
904
+ end
905
+
906
+ class << OutputTableHandle =
907
+ Base.new(:connector_id, :transaction_handle, :connector_handle)
908
+ def decode(hash)
909
+ unless hash.is_a?(Hash)
910
+ raise TypeError, "Can't convert #{hash.class} to Hash"
911
+ end
912
+ obj = allocate
913
+ obj.send(:initialize_struct,
914
+ hash["connectorId"],
915
+ hash["transactionHandle"],
916
+ hash["connectorHandle"],
917
+ )
918
+ obj
919
+ end
920
+ end
921
+
922
+ class << PageBufferInfo =
923
+ Base.new(:partition, :buffered_pages, :buffered_bytes, :rows_added, :pages_added)
924
+ def decode(hash)
925
+ unless hash.is_a?(Hash)
926
+ raise TypeError, "Can't convert #{hash.class} to Hash"
927
+ end
928
+ obj = allocate
929
+ obj.send(:initialize_struct,
930
+ hash["partition"],
931
+ hash["bufferedPages"],
932
+ hash["bufferedBytes"],
933
+ hash["rowsAdded"],
934
+ hash["pagesAdded"],
935
+ )
936
+ obj
937
+ end
938
+ end
939
+
940
+ class << Partitioning =
941
+ Base.new(:handle, :arguments)
942
+ def decode(hash)
943
+ unless hash.is_a?(Hash)
944
+ raise TypeError, "Can't convert #{hash.class} to Hash"
945
+ end
946
+ obj = allocate
947
+ obj.send(:initialize_struct,
948
+ hash["handle"] && PartitioningHandle.decode(hash["handle"]),
949
+ hash["arguments"] && hash["arguments"].map {|h| ArgumentBinding.decode(h) },
950
+ )
951
+ obj
952
+ end
953
+ end
954
+
955
+ class << PartitioningHandle =
956
+ Base.new(:connector_id, :transaction_handle, :connector_handle)
957
+ def decode(hash)
958
+ unless hash.is_a?(Hash)
959
+ raise TypeError, "Can't convert #{hash.class} to Hash"
960
+ end
961
+ obj = allocate
962
+ obj.send(:initialize_struct,
963
+ hash["connectorId"],
964
+ hash["transactionHandle"],
965
+ hash["connectorHandle"],
966
+ )
967
+ obj
968
+ end
969
+ end
970
+
971
+ class << PartitioningScheme =
972
+ Base.new(:partitioning, :output_layout, :hash_column, :replicate_nulls, :bucket_to_partition)
973
+ def decode(hash)
974
+ unless hash.is_a?(Hash)
975
+ raise TypeError, "Can't convert #{hash.class} to Hash"
976
+ end
977
+ obj = allocate
978
+ obj.send(:initialize_struct,
979
+ hash["partitioning"] && Partitioning.decode(hash["partitioning"]),
980
+ hash["outputLayout"],
981
+ hash["hashColumn"],
982
+ hash["replicateNulls"],
983
+ hash["bucketToPartition"],
984
+ )
985
+ obj
986
+ end
987
+ end
988
+
989
+ class << PipelineStats =
990
+ Base.new(:first_start_time, :last_start_time, :last_end_time, :input_pipeline, :output_pipeline, :total_drivers, :queued_drivers, :queued_partitioned_drivers, :running_drivers, :running_partitioned_drivers, :completed_drivers, :memory_reservation, :system_memory_reservation, :queued_time, :elapsed_time, :total_scheduled_time, :total_cpu_time, :total_user_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions, :operator_summaries, :drivers)
991
+ def decode(hash)
992
+ unless hash.is_a?(Hash)
993
+ raise TypeError, "Can't convert #{hash.class} to Hash"
994
+ end
995
+ obj = allocate
996
+ obj.send(:initialize_struct,
997
+ hash["firstStartTime"],
998
+ hash["lastStartTime"],
999
+ hash["lastEndTime"],
1000
+ hash["inputPipeline"],
1001
+ hash["outputPipeline"],
1002
+ hash["totalDrivers"],
1003
+ hash["queuedDrivers"],
1004
+ hash["queuedPartitionedDrivers"],
1005
+ hash["runningDrivers"],
1006
+ hash["runningPartitionedDrivers"],
1007
+ hash["completedDrivers"],
1008
+ hash["memoryReservation"],
1009
+ hash["systemMemoryReservation"],
1010
+ hash["queuedTime"] && DistributionSnapshot.decode(hash["queuedTime"]),
1011
+ hash["elapsedTime"] && DistributionSnapshot.decode(hash["elapsedTime"]),
1012
+ hash["totalScheduledTime"],
1013
+ hash["totalCpuTime"],
1014
+ hash["totalUserTime"],
1015
+ hash["totalBlockedTime"],
1016
+ hash["fullyBlocked"],
1017
+ hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym },
1018
+ hash["rawInputDataSize"],
1019
+ hash["rawInputPositions"],
1020
+ hash["processedInputDataSize"],
1021
+ hash["processedInputPositions"],
1022
+ hash["outputDataSize"],
1023
+ hash["outputPositions"],
1024
+ hash["operatorSummaries"] && hash["operatorSummaries"].map {|h| OperatorStats.decode(h) },
1025
+ hash["drivers"] && hash["drivers"].map {|h| DriverStats.decode(h) },
1026
+ )
1027
+ obj
1028
+ end
1029
+ end
1030
+
1031
+ class << PlanFragment =
1032
+ Base.new(:id, :root, :symbols, :partitioning, :partitioned_sources, :partitioning_scheme)
1033
+ def decode(hash)
1034
+ unless hash.is_a?(Hash)
1035
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1036
+ end
1037
+ obj = allocate
1038
+ obj.send(:initialize_struct,
1039
+ hash["id"],
1040
+ hash["root"] && PlanNode.decode(hash["root"]),
1041
+ hash["symbols"],
1042
+ hash["partitioning"] && PartitioningHandle.decode(hash["partitioning"]),
1043
+ hash["partitionedSources"],
1044
+ hash["partitioningScheme"] && PartitioningScheme.decode(hash["partitioningScheme"]),
1045
+ )
1046
+ obj
1047
+ end
1048
+ end
1049
+
1050
+ class << ProjectNode =
1051
+ Base.new(:id, :source, :assignments)
1052
+ def decode(hash)
1053
+ unless hash.is_a?(Hash)
1054
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1055
+ end
1056
+ obj = allocate
1057
+ obj.send(:initialize_struct,
1058
+ hash["id"],
1059
+ hash["source"] && PlanNode.decode(hash["source"]),
1060
+ hash["assignments"],
1061
+ )
1062
+ obj
1063
+ end
1064
+ end
1065
+
1066
+ class << QueryError =
1067
+ Base.new(:message, :sql_state, :error_code, :error_name, :error_type, :error_location, :failure_info)
1068
+ def decode(hash)
1069
+ unless hash.is_a?(Hash)
1070
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1071
+ end
1072
+ obj = allocate
1073
+ obj.send(:initialize_struct,
1074
+ hash["message"],
1075
+ hash["sqlState"],
1076
+ hash["errorCode"],
1077
+ hash["errorName"],
1078
+ hash["errorType"],
1079
+ hash["errorLocation"] && ErrorLocation.decode(hash["errorLocation"]),
1080
+ hash["failureInfo"] && FailureInfo.decode(hash["failureInfo"]),
1081
+ )
1082
+ obj
1083
+ end
1084
+ end
1085
+
1086
+ class << QueryInfo =
1087
+ Base.new(:query_id, :session, :state, :memory_pool, :scheduled, :self, :field_names, :query, :query_stats, :set_session_properties, :reset_session_properties, :added_prepared_statements, :deallocated_prepared_statements, :started_transaction_id, :clear_transaction_id, :update_type, :output_stage, :failure_info, :error_code, :inputs, :output, :complete_info)
1088
+ def decode(hash)
1089
+ unless hash.is_a?(Hash)
1090
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1091
+ end
1092
+ obj = allocate
1093
+ obj.send(:initialize_struct,
1094
+ hash["queryId"],
1095
+ hash["session"] && SessionRepresentation.decode(hash["session"]),
1096
+ hash["state"] && hash["state"].downcase.to_sym,
1097
+ hash["memoryPool"],
1098
+ hash["scheduled"],
1099
+ hash["self"],
1100
+ hash["fieldNames"],
1101
+ hash["query"],
1102
+ hash["queryStats"] && QueryStats.decode(hash["queryStats"]),
1103
+ hash["setSessionProperties"],
1104
+ hash["resetSessionProperties"],
1105
+ hash["addedPreparedStatements"],
1106
+ hash["deallocatedPreparedStatements"],
1107
+ hash["startedTransactionId"],
1108
+ hash["clearTransactionId"],
1109
+ hash["updateType"],
1110
+ hash["outputStage"] && StageInfo.decode(hash["outputStage"]),
1111
+ hash["failureInfo"] && FailureInfo.decode(hash["failureInfo"]),
1112
+ hash["errorCode"] && ErrorCode.decode(hash["errorCode"]),
1113
+ hash["inputs"] && hash["inputs"].map {|h| Input.decode(h) },
1114
+ hash["output"] && Output.decode(hash["output"]),
1115
+ hash["completeInfo"],
1116
+ )
1117
+ obj
1118
+ end
1119
+ end
1120
+
1121
+ class << QueryResults =
1122
+ Base.new(:id, :info_uri, :partial_cancel_uri, :next_uri, :columns, :data, :stats, :error, :update_type, :update_count)
1123
+ def decode(hash)
1124
+ unless hash.is_a?(Hash)
1125
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1126
+ end
1127
+ obj = allocate
1128
+ obj.send(:initialize_struct,
1129
+ hash["id"],
1130
+ hash["infoUri"],
1131
+ hash["partialCancelUri"],
1132
+ hash["nextUri"],
1133
+ hash["columns"] && hash["columns"].map {|h| ClientColumn.decode(h) },
1134
+ hash["data"],
1135
+ hash["stats"] && StatementStats.decode(hash["stats"]),
1136
+ hash["error"] && QueryError.decode(hash["error"]),
1137
+ hash["updateType"],
1138
+ hash["updateCount"],
1139
+ )
1140
+ obj
1141
+ end
1142
+ end
1143
+
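
QueryResults is the top-level object decoded from each statement-protocol HTTP response. A sketch of decoding a heavily trimmed payload (ids and URIs are made up):

    require 'json'
    models = Trino::Client::ModelVersions::V0_153

    payload = JSON.parse(<<~JSON)
      {"id": "20210101_000000_00001_abcde",
       "infoUri": "http://localhost:8080/query.html?20210101_000000_00001_abcde",
       "nextUri": "http://localhost:8080/v1/statement/20210101_000000_00001_abcde/1",
       "columns": [{"name": "cnt", "type": "bigint"}],
       "data": [[42]]}
    JSON

    results = models::QueryResults.decode(payload)
    results.columns.first.name   # => "cnt"
    results.data                 # => [[42]]
    results.next_uri             # => the follow-up URI to poll
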
1144
+ class << QueryStats =
1145
+ Base.new(:create_time, :execution_start_time, :last_heartbeat, :end_time, :elapsed_time, :queued_time, :analysis_time, :distributed_planning_time, :total_planning_time, :finishing_time, :total_tasks, :running_tasks, :completed_tasks, :total_drivers, :queued_drivers, :running_drivers, :completed_drivers, :cumulative_memory, :total_memory_reservation, :peak_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_user_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions)
1146
+ def decode(hash)
1147
+ unless hash.is_a?(Hash)
1148
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1149
+ end
1150
+ obj = allocate
1151
+ obj.send(:initialize_struct,
1152
+ hash["createTime"],
1153
+ hash["executionStartTime"],
1154
+ hash["lastHeartbeat"],
1155
+ hash["endTime"],
1156
+ hash["elapsedTime"],
1157
+ hash["queuedTime"],
1158
+ hash["analysisTime"],
1159
+ hash["distributedPlanningTime"],
1160
+ hash["totalPlanningTime"],
1161
+ hash["finishingTime"],
1162
+ hash["totalTasks"],
1163
+ hash["runningTasks"],
1164
+ hash["completedTasks"],
1165
+ hash["totalDrivers"],
1166
+ hash["queuedDrivers"],
1167
+ hash["runningDrivers"],
1168
+ hash["completedDrivers"],
1169
+ hash["cumulativeMemory"],
1170
+ hash["totalMemoryReservation"],
1171
+ hash["peakMemoryReservation"],
1172
+ hash["totalScheduledTime"],
1173
+ hash["totalCpuTime"],
1174
+ hash["totalUserTime"],
1175
+ hash["totalBlockedTime"],
1176
+ hash["fullyBlocked"],
1177
+ hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym },
1178
+ hash["rawInputDataSize"],
1179
+ hash["rawInputPositions"],
1180
+ hash["processedInputDataSize"],
1181
+ hash["processedInputPositions"],
1182
+ hash["outputDataSize"],
1183
+ hash["outputPositions"],
1184
+ )
1185
+ obj
1186
+ end
1187
+ end
1188
+
1189
+ class << RemoteSourceNode =
1190
+ Base.new(:id, :source_fragment_ids, :outputs)
1191
+ def decode(hash)
1192
+ unless hash.is_a?(Hash)
1193
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1194
+ end
1195
+ obj = allocate
1196
+ obj.send(:initialize_struct,
1197
+ hash["id"],
1198
+ hash["sourceFragmentIds"],
1199
+ hash["outputs"],
1200
+ )
1201
+ obj
1202
+ end
1203
+ end
1204
+
1205
+ class << RowNumberNode =
1206
+ Base.new(:id, :source, :partition_by, :row_number_symbol, :max_row_count_per_partition, :hash_symbol)
1207
+ def decode(hash)
1208
+ unless hash.is_a?(Hash)
1209
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1210
+ end
1211
+ obj = allocate
1212
+ obj.send(:initialize_struct,
1213
+ hash["id"],
1214
+ hash["source"] && PlanNode.decode(hash["source"]),
1215
+ hash["partitionBy"],
1216
+ hash["rowNumberSymbol"],
1217
+ hash["maxRowCountPerPartition"],
1218
+ hash["hashSymbol"],
1219
+ )
1220
+ obj
1221
+ end
1222
+ end
1223
+
1224
+ class << SampleNode =
1225
+ Base.new(:id, :source, :sample_ratio, :sample_type, :rescaled, :sample_weight_symbol)
1226
+ def decode(hash)
1227
+ unless hash.is_a?(Hash)
1228
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1229
+ end
1230
+ obj = allocate
1231
+ obj.send(:initialize_struct,
1232
+ hash["id"],
1233
+ hash["source"] && PlanNode.decode(hash["source"]),
1234
+ hash["sampleRatio"],
1235
+ hash["sampleType"],
1236
+ hash["rescaled"],
1237
+ hash["sampleWeightSymbol"],
1238
+ )
1239
+ obj
1240
+ end
1241
+ end
1242
+
1243
+ class << SemiJoinNode =
1244
+ Base.new(:id, :source, :filtering_source, :source_join_symbol, :filtering_source_join_symbol, :semi_join_output, :source_hash_symbol, :filtering_source_hash_symbol)
1245
+ def decode(hash)
1246
+ unless hash.is_a?(Hash)
1247
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1248
+ end
1249
+ obj = allocate
1250
+ obj.send(:initialize_struct,
1251
+ hash["id"],
1252
+ hash["source"] && PlanNode.decode(hash["source"]),
1253
+ hash["filteringSource"] && PlanNode.decode(hash["filteringSource"]),
1254
+ hash["sourceJoinSymbol"],
1255
+ hash["filteringSourceJoinSymbol"],
1256
+ hash["semiJoinOutput"],
1257
+ hash["sourceHashSymbol"],
1258
+ hash["filteringSourceHashSymbol"],
1259
+ )
1260
+ obj
1261
+ end
1262
+ end
1263
+
1264
+ class << SessionRepresentation =
1265
+ Base.new(:query_id, :transaction_id, :client_transaction_support, :user, :principal, :source, :catalog, :schema, :time_zone_key, :locale, :remote_user_address, :user_agent, :start_time, :system_properties, :catalog_properties, :prepared_statements)
1266
+ def decode(hash)
1267
+ unless hash.is_a?(Hash)
1268
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1269
+ end
1270
+ obj = allocate
1271
+ obj.send(:initialize_struct,
1272
+ hash["queryId"],
1273
+ hash["transactionId"],
1274
+ hash["clientTransactionSupport"],
1275
+ hash["user"],
1276
+ hash["principal"],
1277
+ hash["source"],
1278
+ hash["catalog"],
1279
+ hash["schema"],
1280
+ hash["timeZoneKey"],
1281
+ hash["locale"],
1282
+ hash["remoteUserAddress"],
1283
+ hash["userAgent"],
1284
+ hash["startTime"],
1285
+ hash["systemProperties"],
1286
+ hash["catalogProperties"],
1287
+ hash["preparedStatements"],
1288
+ )
1289
+ obj
1290
+ end
1291
+ end
1292
+
1293
+ class << Signature =
1294
+ Base.new(:name, :kind, :type_variable_constraints, :long_variable_constraints, :return_type, :argument_types, :variable_arity)
1295
+ def decode(hash)
1296
+ unless hash.is_a?(Hash)
1297
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1298
+ end
1299
+ obj = allocate
1300
+ obj.send(:initialize_struct,
1301
+ hash["name"],
1302
+ hash["kind"] && hash["kind"].downcase.to_sym,
1303
+ hash["typeVariableConstraints"] && hash["typeVariableConstraints"].map {|h| TypeVariableConstraint.decode(h) },
1304
+ hash["longVariableConstraints"] && hash["longVariableConstraints"].map {|h| LongVariableConstraint.decode(h) },
1305
+ hash["returnType"],
1306
+ hash["argumentTypes"],
1307
+ hash["variableArity"],
1308
+ )
1309
+ obj
1310
+ end
1311
+ end
1312
+
1313
+ class << SortNode =
1314
+ Base.new(:id, :source, :order_by, :orderings)
1315
+ def decode(hash)
1316
+ unless hash.is_a?(Hash)
1317
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1318
+ end
1319
+ obj = allocate
1320
+ obj.send(:initialize_struct,
1321
+ hash["id"],
1322
+ hash["source"] && PlanNode.decode(hash["source"]),
1323
+ hash["orderBy"],
1324
+ hash["orderings"] && Hash[hash["orderings"].to_a.map! {|k,v| [k, v.downcase.to_sym] }],
1325
+ )
1326
+ obj
1327
+ end
1328
+ end
1329
+
1330
+ class << StageInfo =
1331
+ Base.new(:stage_id, :state, :self, :plan, :types, :stage_stats, :tasks, :sub_stages, :failure_cause)
1332
+ def decode(hash)
1333
+ unless hash.is_a?(Hash)
1334
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1335
+ end
1336
+ obj = allocate
1337
+ obj.send(:initialize_struct,
1338
+ hash["stageId"] && StageId.new(hash["stageId"]),
1339
+ hash["state"] && hash["state"].downcase.to_sym,
1340
+ hash["self"],
1341
+ hash["plan"] && PlanFragment.decode(hash["plan"]),
1342
+ hash["types"],
1343
+ hash["stageStats"] && StageStats.decode(hash["stageStats"]),
1344
+ hash["tasks"] && hash["tasks"].map {|h| TaskInfo.decode(h) },
1345
+ hash["subStages"] && hash["subStages"].map {|h| StageInfo.decode(h) },
1346
+ hash["failureCause"] && ExecutionFailureInfo.decode(hash["failureCause"]),
1347
+ )
1348
+ obj
1349
+ end
1350
+ end
1351
+
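
StageInfo ties several of the pieces above together: the raw stage id string becomes a StageId and the state string becomes a lowercase symbol. A minimal sketch (identifier made up):

    models = Trino::Client::ModelVersions::V0_153

    stage = models::StageInfo.decode(
      "stageId"   => "20210101_000000_00001_abcde.0",
      "state"     => "RUNNING",
      "subStages" => [])

    stage.stage_id.query_id   # => "20210101_000000_00001_abcde"
    stage.state               # => :running
    stage.sub_stages          # => []
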
1352
+ class << StageStats =
1353
+ Base.new(:scheduling_complete, :get_split_distribution, :schedule_task_distribution, :add_split_distribution, :total_tasks, :running_tasks, :completed_tasks, :total_drivers, :queued_drivers, :running_drivers, :completed_drivers, :cumulative_memory, :total_memory_reservation, :peak_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_user_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions)
1354
+ def decode(hash)
1355
+ unless hash.is_a?(Hash)
1356
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1357
+ end
1358
+ obj = allocate
1359
+ obj.send(:initialize_struct,
1360
+ hash["schedulingComplete"],
1361
+ hash["getSplitDistribution"] && DistributionSnapshot.decode(hash["getSplitDistribution"]),
1362
+ hash["scheduleTaskDistribution"] && DistributionSnapshot.decode(hash["scheduleTaskDistribution"]),
1363
+ hash["addSplitDistribution"] && DistributionSnapshot.decode(hash["addSplitDistribution"]),
1364
+ hash["totalTasks"],
1365
+ hash["runningTasks"],
1366
+ hash["completedTasks"],
1367
+ hash["totalDrivers"],
1368
+ hash["queuedDrivers"],
1369
+ hash["runningDrivers"],
1370
+ hash["completedDrivers"],
1371
+ hash["cumulativeMemory"],
1372
+ hash["totalMemoryReservation"],
1373
+ hash["peakMemoryReservation"],
1374
+ hash["totalScheduledTime"],
1375
+ hash["totalCpuTime"],
1376
+ hash["totalUserTime"],
1377
+ hash["totalBlockedTime"],
1378
+ hash["fullyBlocked"],
1379
+ hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym },
1380
+ hash["rawInputDataSize"],
1381
+ hash["rawInputPositions"],
1382
+ hash["processedInputDataSize"],
1383
+ hash["processedInputPositions"],
1384
+ hash["outputDataSize"],
1385
+ hash["outputPositions"],
1386
+ )
1387
+ obj
1388
+ end
1389
+ end
1390
+
1391
+ class << StatementStats =
1392
+ Base.new(:state, :queued, :scheduled, :nodes, :total_splits, :queued_splits, :running_splits, :completed_splits, :user_time_millis, :cpu_time_millis, :wall_time_millis, :processed_rows, :processed_bytes, :root_stage)
1393
+ def decode(hash)
1394
+ unless hash.is_a?(Hash)
1395
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1396
+ end
1397
+ obj = allocate
1398
+ obj.send(:initialize_struct,
1399
+ hash["state"],
1400
+ hash["queued"],
1401
+ hash["scheduled"],
1402
+ hash["nodes"],
1403
+ hash["totalSplits"],
1404
+ hash["queuedSplits"],
1405
+ hash["runningSplits"],
1406
+ hash["completedSplits"],
1407
+ hash["userTimeMillis"],
1408
+ hash["cpuTimeMillis"],
1409
+ hash["wallTimeMillis"],
1410
+ hash["processedRows"],
1411
+ hash["processedBytes"],
1412
+ hash["rootStage"] && ClientStageStats.decode(hash["rootStage"]),
1413
+ )
1414
+ obj
1415
+ end
1416
+ end
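
StatementStats carries the per-query progress counters that accompany each protocol response; a small sketch of turning them into a completion percentage (all numbers are made up):

    models = Trino::Client::ModelVersions::V0_153

    stats = models::StatementStats.decode(
      "state" => "RUNNING", "scheduled" => true,
      "totalSplits" => 100, "completedSplits" => 25,
      "processedRows" => 1_000, "processedBytes" => 65_536)

    percent_done = 100.0 * stats.completed_splits / stats.total_splits
    # => 25.0
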
1417
+
1418
+ class << TableFinishNode =
1419
+ Base.new(:id, :source, :target, :outputs)
1420
+ def decode(hash)
1421
+ unless hash.is_a?(Hash)
1422
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1423
+ end
1424
+ obj = allocate
1425
+ obj.send(:initialize_struct,
1426
+ hash["id"],
1427
+ hash["source"] && PlanNode.decode(hash["source"]),
1428
+ hash["target"] && WriterTarget.decode(hash["target"]),
1429
+ hash["outputs"],
1430
+ )
1431
+ obj
1432
+ end
1433
+ end
1434
+
1435
+ class << TableHandle =
1436
+ Base.new(:connector_id, :connector_handle)
1437
+ def decode(hash)
1438
+ unless hash.is_a?(Hash)
1439
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1440
+ end
1441
+ obj = allocate
1442
+ obj.send(:initialize_struct,
1443
+ hash["connectorId"],
1444
+ hash["connectorHandle"],
1445
+ )
1446
+ obj
1447
+ end
1448
+ end
1449
+
1450
+ class << TableLayoutHandle =
1451
+ Base.new(:connector_id, :transaction_handle, :connector_handle)
1452
+ def decode(hash)
1453
+ unless hash.is_a?(Hash)
1454
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1455
+ end
1456
+ obj = allocate
1457
+ obj.send(:initialize_struct,
1458
+ hash["connectorId"],
1459
+ hash["transactionHandle"],
1460
+ hash["connectorHandle"],
1461
+ )
1462
+ obj
1463
+ end
1464
+ end
1465
+
1466
+ class << TableScanNode =
1467
+ Base.new(:id, :table, :output_symbols, :assignments, :layout, :current_constraint, :original_constraint)
1468
+ def decode(hash)
1469
+ unless hash.is_a?(Hash)
1470
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1471
+ end
1472
+ obj = allocate
1473
+ obj.send(:initialize_struct,
1474
+ hash["id"],
1475
+ hash["table"] && TableHandle.decode(hash["table"]),
1476
+ hash["outputSymbols"],
1477
+ hash["assignments"],
1478
+ hash["layout"] && TableLayoutHandle.decode(hash["layout"]),
1479
+ hash["currentConstraint"],
1480
+ hash["originalConstraint"],
1481
+ )
1482
+ obj
1483
+ end
1484
+ end
1485
+
1486
+ class << TableWriterNode =
1487
+ Base.new(:id, :source, :target, :columns, :column_names, :outputs, :sample_weight_symbol, :partitioning_scheme)
1488
+ def decode(hash)
1489
+ unless hash.is_a?(Hash)
1490
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1491
+ end
1492
+ obj = allocate
1493
+ obj.send(:initialize_struct,
1494
+ hash["id"],
1495
+ hash["source"] && PlanNode.decode(hash["source"]),
1496
+ hash["target"] && WriterTarget.decode(hash["target"]),
1497
+ hash["columns"],
1498
+ hash["columnNames"],
1499
+ hash["outputs"],
1500
+ hash["sampleWeightSymbol"],
1501
+ hash["partitioningScheme"] && PartitioningScheme.decode(hash["partitioningScheme"]),
1502
+ )
1503
+ obj
1504
+ end
1505
+ end
1506
+
1507
+ class << TaskInfo =
1508
+ Base.new(:task_status, :last_heartbeat, :output_buffers, :no_more_splits, :stats, :needs_plan, :complete)
1509
+ def decode(hash)
1510
+ unless hash.is_a?(Hash)
1511
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1512
+ end
1513
+ obj = allocate
1514
+ obj.send(:initialize_struct,
1515
+ hash["taskStatus"] && TaskStatus.decode(hash["taskStatus"]),
1516
+ hash["lastHeartbeat"],
1517
+ hash["outputBuffers"] && OutputBufferInfo.decode(hash["outputBuffers"]),
1518
+ hash["noMoreSplits"],
1519
+ hash["stats"] && TaskStats.decode(hash["stats"]),
1520
+ hash["needsPlan"],
1521
+ hash["complete"],
1522
+ )
1523
+ obj
1524
+ end
1525
+ end
1526
+
1527
+ class << TaskStats =
1528
+ Base.new(:create_time, :first_start_time, :last_start_time, :last_end_time, :end_time, :elapsed_time, :queued_time, :total_drivers, :queued_drivers, :queued_partitioned_drivers, :running_drivers, :running_partitioned_drivers, :completed_drivers, :cumulative_memory, :memory_reservation, :system_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_user_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions, :pipelines)
1529
+ def decode(hash)
1530
+ unless hash.is_a?(Hash)
1531
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1532
+ end
1533
+ obj = allocate
1534
+ obj.send(:initialize_struct,
1535
+ hash["createTime"],
1536
+ hash["firstStartTime"],
1537
+ hash["lastStartTime"],
1538
+ hash["lastEndTime"],
1539
+ hash["endTime"],
1540
+ hash["elapsedTime"],
1541
+ hash["queuedTime"],
1542
+ hash["totalDrivers"],
1543
+ hash["queuedDrivers"],
1544
+ hash["queuedPartitionedDrivers"],
1545
+ hash["runningDrivers"],
1546
+ hash["runningPartitionedDrivers"],
1547
+ hash["completedDrivers"],
1548
+ hash["cumulativeMemory"],
1549
+ hash["memoryReservation"],
1550
+ hash["systemMemoryReservation"],
1551
+ hash["totalScheduledTime"],
1552
+ hash["totalCpuTime"],
1553
+ hash["totalUserTime"],
1554
+ hash["totalBlockedTime"],
1555
+ hash["fullyBlocked"],
1556
+ hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym },
1557
+ hash["rawInputDataSize"],
1558
+ hash["rawInputPositions"],
1559
+ hash["processedInputDataSize"],
1560
+ hash["processedInputPositions"],
1561
+ hash["outputDataSize"],
1562
+ hash["outputPositions"],
1563
+ hash["pipelines"] && hash["pipelines"].map {|h| PipelineStats.decode(h) },
1564
+ )
1565
+ obj
1566
+ end
1567
+ end
1568
+
1569
+ class << TaskStatus =
1570
+ Base.new(:task_id, :task_instance_id, :version, :state, :self, :failures, :queued_partitioned_drivers, :running_partitioned_drivers, :memory_reservation)
1571
+ def decode(hash)
1572
+ unless hash.is_a?(Hash)
1573
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1574
+ end
1575
+ obj = allocate
1576
+ obj.send(:initialize_struct,
1577
+ hash["taskId"] && TaskId.new(hash["taskId"]),
1578
+ hash["taskInstanceId"],
1579
+ hash["version"],
1580
+ hash["state"] && hash["state"].downcase.to_sym,
1581
+ hash["self"],
1582
+ hash["failures"] && hash["failures"].map {|h| ExecutionFailureInfo.decode(h) },
1583
+ hash["queuedPartitionedDrivers"],
1584
+ hash["runningPartitionedDrivers"],
1585
+ hash["memoryReservation"],
1586
+ )
1587
+ obj
1588
+ end
1589
+ end
1590
+
1591
+ class << TopNNode =
1592
+ Base.new(:id, :source, :count, :order_by, :orderings, :partial)
1593
+ def decode(hash)
1594
+ unless hash.is_a?(Hash)
1595
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1596
+ end
1597
+ obj = allocate
1598
+ obj.send(:initialize_struct,
1599
+ hash["id"],
1600
+ hash["source"] && PlanNode.decode(hash["source"]),
1601
+ hash["count"],
1602
+ hash["orderBy"],
1603
+ hash["orderings"] && Hash[hash["orderings"].to_a.map! {|k,v| [k, v.downcase.to_sym] }],
1604
+ hash["partial"],
1605
+ )
1606
+ obj
1607
+ end
1608
+ end
1609
+
1610
+ class << TopNRowNumberNode =
1611
+ Base.new(:id, :source, :specification, :row_number_symbol, :max_row_count_per_partition, :partial, :hash_symbol)
1612
+ def decode(hash)
1613
+ unless hash.is_a?(Hash)
1614
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1615
+ end
1616
+ obj = allocate
1617
+ obj.send(:initialize_struct,
1618
+ hash["id"],
1619
+ hash["source"] && PlanNode.decode(hash["source"]),
1620
+ hash["specification"] && Specification.decode(hash["specification"]),
1621
+ hash["rowNumberSymbol"],
1622
+ hash["maxRowCountPerPartition"],
1623
+ hash["partial"],
1624
+ hash["hashSymbol"],
1625
+ )
1626
+ obj
1627
+ end
1628
+ end
1629
+
1630
+ class << TypeVariableConstraint =
1631
+ Base.new(:name, :comparable_required, :orderable_required, :variadic_bound)
1632
+ def decode(hash)
1633
+ unless hash.is_a?(Hash)
1634
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1635
+ end
1636
+ obj = allocate
1637
+ obj.send(:initialize_struct,
1638
+ hash["name"],
1639
+ hash["comparableRequired"],
1640
+ hash["orderableRequired"],
1641
+ hash["variadicBound"],
1642
+ )
1643
+ obj
1644
+ end
1645
+ end
1646
+
1647
+ class << UnionNode =
1648
+ Base.new(:id, :sources, :output_to_inputs, :outputs)
1649
+ def decode(hash)
1650
+ unless hash.is_a?(Hash)
1651
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1652
+ end
1653
+ obj = allocate
1654
+ obj.send(:initialize_struct,
1655
+ hash["id"],
1656
+ hash["sources"] && hash["sources"].map {|h| PlanNode.decode(h) },
1657
+ hash["outputToInputs"],
1658
+ hash["outputs"],
1659
+ )
1660
+ obj
1661
+ end
1662
+ end
1663
+
1664
+ class << UnnestNode =
1665
+ Base.new(:id, :source, :replicate_symbols, :unnest_symbols, :ordinality_symbol)
1666
+ def decode(hash)
1667
+ unless hash.is_a?(Hash)
1668
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1669
+ end
1670
+ obj = allocate
1671
+ obj.send(:initialize_struct,
1672
+ hash["id"],
1673
+ hash["source"] && PlanNode.decode(hash["source"]),
1674
+ hash["replicateSymbols"],
1675
+ hash["unnestSymbols"],
1676
+ hash["ordinalitySymbol"],
1677
+ )
1678
+ obj
1679
+ end
1680
+ end
1681
+
1682
+ class << ValuesNode =
1683
+ Base.new(:id, :output_symbols, :rows)
1684
+ def decode(hash)
1685
+ unless hash.is_a?(Hash)
1686
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1687
+ end
1688
+ obj = allocate
1689
+ obj.send(:initialize_struct,
1690
+ hash["id"],
1691
+ hash["outputSymbols"],
1692
+ hash["rows"],
1693
+ )
1694
+ obj
1695
+ end
1696
+ end
1697
+
1698
+ class << WindowNode =
1699
+ Base.new(:id, :source, :specification, :window_functions, :hash_symbol, :pre_partitioned_inputs, :pre_sorted_order_prefix)
1700
+ def decode(hash)
1701
+ unless hash.is_a?(Hash)
1702
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1703
+ end
1704
+ obj = allocate
1705
+ obj.send(:initialize_struct,
1706
+ hash["id"],
1707
+ hash["source"] && PlanNode.decode(hash["source"]),
1708
+ hash["specification"] && Specification.decode(hash["specification"]),
1709
+ hash["hashSymbol"],
1710
+ hash["prePartitionedInputs"],
1711
+ hash["preSortedOrderPrefix"],
1712
+ )
1713
+ obj
1714
+ end
1715
+ end
1716
+
1717
+
1718
+ end
1719
+ end