trino-client 1.0.0

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
Files changed (43)
  1. checksums.yaml +7 -0
  2. data/.github/CODEOWNERS +1 -0
  3. data/.github/PULL_REQUEST_TEMPLATE.md +18 -0
  4. data/.github/workflows/ruby.yml +30 -0
  5. data/.gitignore +4 -0
  6. data/ChangeLog.md +168 -0
  7. data/Gemfile +7 -0
  8. data/LICENSE +202 -0
  9. data/README.md +131 -0
  10. data/Rakefile +45 -0
  11. data/lib/trino-client.rb +1 -0
  12. data/lib/trino/client.rb +23 -0
  13. data/lib/trino/client/client.rb +78 -0
  14. data/lib/trino/client/errors.rb +46 -0
  15. data/lib/trino/client/faraday_client.rb +242 -0
  16. data/lib/trino/client/model_versions/0.149.rb +1683 -0
  17. data/lib/trino/client/model_versions/0.153.rb +1719 -0
  18. data/lib/trino/client/model_versions/0.173.rb +1685 -0
  19. data/lib/trino/client/model_versions/0.178.rb +1964 -0
  20. data/lib/trino/client/model_versions/0.205.rb +2169 -0
  21. data/lib/trino/client/model_versions/303.rb +2574 -0
  22. data/lib/trino/client/model_versions/316.rb +2595 -0
  23. data/lib/trino/client/model_versions/351.rb +2726 -0
  24. data/lib/trino/client/models.rb +38 -0
  25. data/lib/trino/client/query.rb +144 -0
  26. data/lib/trino/client/statement_client.rb +279 -0
  27. data/lib/trino/client/version.rb +20 -0
  28. data/modelgen/model_versions.rb +280 -0
  29. data/modelgen/modelgen.rb +119 -0
  30. data/modelgen/models.rb +31 -0
  31. data/modelgen/trino_models.rb +270 -0
  32. data/release.rb +56 -0
  33. data/spec/basic_query_spec.rb +82 -0
  34. data/spec/client_spec.rb +75 -0
  35. data/spec/gzip_spec.rb +40 -0
  36. data/spec/model_spec.rb +35 -0
  37. data/spec/spec_helper.rb +42 -0
  38. data/spec/statement_client_spec.rb +637 -0
  39. data/spec/tpch/q01.sql +21 -0
  40. data/spec/tpch/q02.sql +43 -0
  41. data/spec/tpch_query_spec.rb +41 -0
  42. data/trino-client.gemspec +31 -0
  43. metadata +211 -0
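
Only one of the 43 files has its hunk reproduced below: the auto-generated model definitions for Trino model version 0.149. Every class in that file derives from an immutable Struct subclass named Base; the generated classes are constructed from a hash keyed by member name instead of a positional argument list, and their setters are removed. The following is a minimal, self-contained sketch of that pattern, reusing the same Base code that appears in the file below and the generated ErrorLocation shape as the example; the sample values are illustrative only.

# Immutable, hash-initialized Struct pattern used by the generated model classes.
class Base < Struct
  class << self
    alias_method :new_struct, :new

    def new(*args)
      new_struct(*args) do
        # make instances immutable
        undef_method :"[]="
        members.each {|m| undef_method :"#{m}=" }

        # replace the constructor so it accepts a hash instead of an argument list
        alias_method :initialize_struct, :initialize
        def initialize(params={})
          initialize_struct(*members.map {|m| params[m] })
        end
      end
    end
  end
end

# Same shape as the generated ErrorLocation model (line/column of a SQL error).
ErrorLocation = Base.new(:line_number, :column_number)

params = { line_number: 3, column_number: 14 }
loc = ErrorLocation.new(params)
loc.line_number        # => 3
loc.column_number      # => 14
# loc.line_number = 5  # => NoMethodError -- setters are undefined, so model objects stay immutable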
data/lib/trino/client/model_versions/0.149.rb
@@ -0,0 +1,1683 @@
1
+ #
2
+ # Trino client for Ruby
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ module Trino::Client::ModelVersions
17
+
18
+ ####
19
+ ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command.
20
+ ## You should not edit this file directly. To modify the class definitions, edit
21
+ ## modelgen/model_versions.rb file and run "rake modelgen:all".
22
+ ##
23
+
24
+ module V0_149
25
+ class Base < Struct
26
+ class << self
27
+ alias_method :new_struct, :new
28
+
29
+ def new(*args)
30
+ new_struct(*args) do
31
+ # make it immutable
32
+ undef_method :"[]="
33
+ members.each do |m|
34
+ undef_method :"#{m}="
35
+ end
36
+
37
+ # replace constructor to receive hash instead of array
38
+ alias_method :initialize_struct, :initialize
39
+
40
+ def initialize(params={})
41
+ initialize_struct(*members.map {|m| params[m] })
42
+ end
43
+ end
44
+ end
45
+ end
46
+ end
47
+
48
+ class StageId < String
49
+ def initialize(str)
50
+ super
51
+ splitted = split('.', 2)
52
+ @query_id = splitted[0]
53
+ @id = splitted[1]
54
+ end
55
+
56
+ attr_reader :query_id, :id
57
+ end
58
+
59
+ class TaskId < String
60
+ def initialize(str)
61
+ super
62
+ splitted = split('.', 3)
63
+ @stage_id = StageId.new("#{splitted[0]}.#{splitted[1]}")
64
+ @query_id = @stage_id.query_id
65
+ @id = splitted[2]
66
+ end
67
+
68
+ attr_reader :query_id, :stage_id, :id
69
+ end
70
+
71
+ class ConnectorSession < Hash
72
+ def initialize(hash)
73
+ super()
74
+ merge!(hash)
75
+ end
76
+ end
77
+
78
+ module PlanNode
79
+ def self.decode(hash)
80
+ unless hash.is_a?(Hash)
81
+ raise TypeError, "Can't convert #{hash.class} to Hash"
82
+ end
83
+ model_class = case hash["@type"]
84
+ when "output" then OutputNode
85
+ when "project" then ProjectNode
86
+ when "tablescan" then TableScanNode
87
+ when "values" then ValuesNode
88
+ when "aggregation" then AggregationNode
89
+ when "markDistinct" then MarkDistinctNode
90
+ when "filter" then FilterNode
91
+ when "window" then WindowNode
92
+ when "rowNumber" then RowNumberNode
93
+ when "topnRowNumber" then TopNRowNumberNode
94
+ when "limit" then LimitNode
95
+ when "distinctlimit" then DistinctLimitNode
96
+ when "topn" then TopNNode
97
+ when "sample" then SampleNode
98
+ when "sort" then SortNode
99
+ when "remoteSource" then RemoteSourceNode
100
+ when "join" then JoinNode
101
+ when "semijoin" then SemiJoinNode
102
+ when "indexjoin" then IndexJoinNode
103
+ when "indexsource" then IndexSourceNode
104
+ when "tablewriter" then TableWriterNode
105
+ when "delete" then DeleteNode
106
+ when "metadatadelete" then MetadataDeleteNode
107
+ when "tablecommit" then TableFinishNode
108
+ when "unnest" then UnnestNode
109
+ when "exchange" then ExchangeNode
110
+ when "union" then UnionNode
111
+ when "intersect" then IntersectNode
112
+ when "scalar" then EnforceSingleRowNode
113
+ when "groupid" then GroupIdNode
114
+ when "explainAnalyze" then ExplainAnalyzeNode
115
+ when "apply" then ApplyNode
116
+ end
117
+ if model_class
118
+ node = model_class.decode(hash)
119
+ class << node
120
+ attr_accessor :plan_node_type
121
+ end
122
+ node.plan_node_type = hash['@type']
123
+ node
124
+ end
125
+ end
126
+ end
127
+
128
+ # io.airlift.stats.Distribution.DistributionSnapshot
129
+ class << DistributionSnapshot =
130
+ Base.new(:max_error, :count, :total, :p01, :p05, :p10, :p25, :p50, :p75, :p90, :p95, :p99, :min, :max)
131
+ def decode(hash)
132
+ unless hash.is_a?(Hash)
133
+ raise TypeError, "Can't convert #{hash.class} to Hash"
134
+ end
135
+ obj = allocate
136
+ obj.send(:initialize_struct,
137
+ hash["maxError"],
138
+ hash["count"],
139
+ hash["total"],
140
+ hash["p01"],
141
+ hash["p05"],
142
+ hash["p10"],
143
+ hash["p25"],
144
+ hash["p50"],
145
+ hash["p75"],
146
+ hash["p90"],
147
+ hash["p95"],
148
+ hash["p99"],
149
+ hash["min"],
150
+ hash["max"],
151
+ )
152
+ obj
153
+ end
154
+ end
155
+
156
+ # This is a hybrid of JoinNode.EquiJoinClause and IndexJoinNode.EquiJoinClause
157
+ class << EquiJoinClause =
158
+ Base.new(:left, :right, :probe, :index)
159
+ def decode(hash)
160
+ unless hash.is_a?(Hash)
161
+ raise TypeError, "Can't convert #{hash.class} to Hash"
162
+ end
163
+ obj = allocate
164
+ obj.send(:initialize_struct,
165
+ hash["left"],
166
+ hash["right"],
167
+ hash["probe"],
168
+ hash["index"],
169
+ )
170
+ obj
171
+ end
172
+ end
173
+
174
+ class << WriterTarget =
175
+ Base.new(:type, :handle)
176
+ def decode(hash)
177
+ unless hash.is_a?(Hash)
178
+ raise TypeError, "Can't convert #{hash.class} to Hash"
179
+ end
180
+ obj = allocate
181
+ model_class = case hash["@type"]
182
+ when "CreateHandle" then OutputTableHandle
183
+ when "InsertHandle" then InsertTableHandle
184
+ when "DeleteHandle" then TableHandle
185
+ end
186
+ obj.send(:initialize_struct,
187
+ hash["@type"],
188
+ model_class.decode(hash['handle'])
189
+ )
190
+ obj
191
+ end
192
+ end
193
+
194
+ class << DeleteHandle =
195
+ Base.new(:handle)
196
+ def decode(hash)
197
+ unless hash.is_a?(Hash)
198
+ raise TypeError, "Can't convert #{hash.class} to Hash"
199
+ end
200
+ obj = allocate
201
+ obj.send(:initialize_struct,
202
+ TableHandle.decode(hash['handle'])
203
+ )
204
+ obj
205
+ end
206
+ end
207
+
208
+ # Inner classes
209
+ class << Specification =
210
+ Base.new(:partition_by, :order_by, :orderings, :frame, :pages_added)
211
+ def decode(hash)
212
+ unless hash.is_a?(Hash)
213
+ raise TypeError, "Can't convert #{hash.class} to Hash"
214
+ end
215
+ obj = allocate
216
+ obj.send(:initialize_struct,
217
+ hash["partitionBy"],
218
+ hash["orderBy"],
219
+ hash["orderings"],
220
+ hash["frame"],
221
+ )
222
+ obj
223
+ end
224
+ end
225
+
226
+ class << ArgumentBinding =
227
+ Base.new(:column, :constant)
228
+ def decode(hash)
229
+ unless hash.is_a?(Hash)
230
+ raise TypeError, "Can't convert #{hash.class} to Hash"
231
+ end
232
+ obj = allocate
233
+ obj.send(:initialize_struct,
234
+ hash["column"],
235
+ hash["constant"]
236
+ )
237
+ obj
238
+ end
239
+ end
240
+
241
+ ##
242
+ # Those model classes are automatically generated
243
+ #
244
+
245
+ class << AggregationNode =
246
+ Base.new(:id, :source, :group_by, :aggregations, :functions, :masks, :grouping_sets, :step, :sample_weight, :confidence, :hash_symbol)
247
+ def decode(hash)
248
+ unless hash.is_a?(Hash)
249
+ raise TypeError, "Can't convert #{hash.class} to Hash"
250
+ end
251
+ obj = allocate
252
+ obj.send(:initialize_struct,
253
+ hash["id"],
254
+ hash["source"] && PlanNode.decode(hash["source"]),
255
+ hash["groupBy"],
256
+ hash["aggregations"],
257
+ hash["functions"] && Hash[hash["functions"].to_a.map! {|k,v| [k, Signature.decode(v)] }],
258
+ hash["masks"],
259
+ hash["groupingSets"],
260
+ hash["step"] && hash["step"].downcase.to_sym,
261
+ hash["sampleWeight"],
262
+ hash["confidence"],
263
+ hash["hashSymbol"],
264
+ )
265
+ obj
266
+ end
267
+ end
268
+
269
+ class << ApplyNode =
270
+ Base.new(:id, :input, :subquery, :correlation)
271
+ def decode(hash)
272
+ unless hash.is_a?(Hash)
273
+ raise TypeError, "Can't convert #{hash.class} to Hash"
274
+ end
275
+ obj = allocate
276
+ obj.send(:initialize_struct,
277
+ hash["id"],
278
+ hash["input"] && PlanNode.decode(hash["input"]),
279
+ hash["subquery"] && PlanNode.decode(hash["subquery"]),
280
+ hash["correlation"],
281
+ )
282
+ obj
283
+ end
284
+ end
285
+
286
+ class << BufferInfo =
287
+ Base.new(:buffer_id, :finished, :buffered_pages, :pages_sent, :page_buffer_info)
288
+ def decode(hash)
289
+ unless hash.is_a?(Hash)
290
+ raise TypeError, "Can't convert #{hash.class} to Hash"
291
+ end
292
+ obj = allocate
293
+ obj.send(:initialize_struct,
294
+ hash["bufferId"] && TaskId.new(hash["bufferId"]),
295
+ hash["finished"],
296
+ hash["bufferedPages"],
297
+ hash["pagesSent"],
298
+ hash["pageBufferInfo"] && PageBufferInfo.decode(hash["pageBufferInfo"]),
299
+ )
300
+ obj
301
+ end
302
+ end
303
+
304
+ class << ClientColumn =
305
+ Base.new(:name, :type, :type_signature)
306
+ def decode(hash)
307
+ unless hash.is_a?(Hash)
308
+ raise TypeError, "Can't convert #{hash.class} to Hash"
309
+ end
310
+ obj = allocate
311
+ obj.send(:initialize_struct,
312
+ hash["name"],
313
+ hash["type"],
314
+ hash["typeSignature"] && ClientTypeSignature.decode(hash["typeSignature"]),
315
+ )
316
+ obj
317
+ end
318
+ end
319
+
320
+ class << ClientStageStats =
321
+ Base.new(:stage_id, :state, :done, :nodes, :total_splits, :queued_splits, :running_splits, :completed_splits, :user_time_millis, :cpu_time_millis, :wall_time_millis, :processed_rows, :processed_bytes, :sub_stages)
322
+ def decode(hash)
323
+ unless hash.is_a?(Hash)
324
+ raise TypeError, "Can't convert #{hash.class} to Hash"
325
+ end
326
+ obj = allocate
327
+ obj.send(:initialize_struct,
328
+ hash["stageId"],
329
+ hash["state"],
330
+ hash["done"],
331
+ hash["nodes"],
332
+ hash["totalSplits"],
333
+ hash["queuedSplits"],
334
+ hash["runningSplits"],
335
+ hash["completedSplits"],
336
+ hash["userTimeMillis"],
337
+ hash["cpuTimeMillis"],
338
+ hash["wallTimeMillis"],
339
+ hash["processedRows"],
340
+ hash["processedBytes"],
341
+ hash["subStages"] && hash["subStages"].map {|h| ClientStageStats.decode(h) },
342
+ )
343
+ obj
344
+ end
345
+ end
346
+
347
+ class << ClientTypeSignature =
348
+ Base.new(:raw_type, :type_arguments, :literal_arguments, :arguments)
349
+ def decode(hash)
350
+ unless hash.is_a?(Hash)
351
+ raise TypeError, "Can't convert #{hash.class} to Hash"
352
+ end
353
+ obj = allocate
354
+ obj.send(:initialize_struct,
355
+ hash["rawType"],
356
+ hash["typeArguments"] && hash["typeArguments"].map {|h| ClientTypeSignature.decode(h) },
357
+ hash["literalArguments"],
358
+ hash["arguments"] && hash["arguments"].map {|h| ClientTypeSignatureParameter.decode(h) },
359
+ )
360
+ obj
361
+ end
362
+ end
363
+
364
+ class << ClientTypeSignatureParameter =
365
+ Base.new(:kind, :value)
366
+ def decode(hash)
367
+ unless hash.is_a?(Hash)
368
+ raise TypeError, "Can't convert #{hash.class} to Hash"
369
+ end
370
+ obj = allocate
371
+ obj.send(:initialize_struct,
372
+ hash["kind"] && hash["kind"].downcase.to_sym,
373
+ hash["value"],
374
+ )
375
+ obj
376
+ end
377
+ end
378
+
379
+ class << Column =
380
+ Base.new(:name, :type)
381
+ def decode(hash)
382
+ unless hash.is_a?(Hash)
383
+ raise TypeError, "Can't convert #{hash.class} to Hash"
384
+ end
385
+ obj = allocate
386
+ obj.send(:initialize_struct,
387
+ hash["name"],
388
+ hash["type"],
389
+ )
390
+ obj
391
+ end
392
+ end
393
+
394
+ class << DeleteNode =
395
+ Base.new(:id, :source, :target, :row_id, :outputs)
396
+ def decode(hash)
397
+ unless hash.is_a?(Hash)
398
+ raise TypeError, "Can't convert #{hash.class} to Hash"
399
+ end
400
+ obj = allocate
401
+ obj.send(:initialize_struct,
402
+ hash["id"],
403
+ hash["source"] && PlanNode.decode(hash["source"]),
404
+ hash["target"] && DeleteHandle.decode(hash["target"]),
405
+ hash["rowId"],
406
+ hash["outputs"],
407
+ )
408
+ obj
409
+ end
410
+ end
411
+
412
+ class << DistinctLimitNode =
413
+ Base.new(:id, :source, :limit, :partial, :hash_symbol)
414
+ def decode(hash)
415
+ unless hash.is_a?(Hash)
416
+ raise TypeError, "Can't convert #{hash.class} to Hash"
417
+ end
418
+ obj = allocate
419
+ obj.send(:initialize_struct,
420
+ hash["id"],
421
+ hash["source"] && PlanNode.decode(hash["source"]),
422
+ hash["limit"],
423
+ hash["partial"],
424
+ hash["hashSymbol"],
425
+ )
426
+ obj
427
+ end
428
+ end
429
+
430
+ class << DriverStats =
431
+ Base.new(:create_time, :start_time, :end_time, :queued_time, :elapsed_time, :memory_reservation, :system_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_user_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :raw_input_data_size, :raw_input_positions, :raw_input_read_time, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions, :operator_stats)
432
+ def decode(hash)
433
+ unless hash.is_a?(Hash)
434
+ raise TypeError, "Can't convert #{hash.class} to Hash"
435
+ end
436
+ obj = allocate
437
+ obj.send(:initialize_struct,
438
+ hash["createTime"],
439
+ hash["startTime"],
440
+ hash["endTime"],
441
+ hash["queuedTime"],
442
+ hash["elapsedTime"],
443
+ hash["memoryReservation"],
444
+ hash["systemMemoryReservation"],
445
+ hash["totalScheduledTime"],
446
+ hash["totalCpuTime"],
447
+ hash["totalUserTime"],
448
+ hash["totalBlockedTime"],
449
+ hash["fullyBlocked"],
450
+ hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym },
451
+ hash["rawInputDataSize"],
452
+ hash["rawInputPositions"],
453
+ hash["rawInputReadTime"],
454
+ hash["processedInputDataSize"],
455
+ hash["processedInputPositions"],
456
+ hash["outputDataSize"],
457
+ hash["outputPositions"],
458
+ hash["operatorStats"] && hash["operatorStats"].map {|h| OperatorStats.decode(h) },
459
+ )
460
+ obj
461
+ end
462
+ end
463
+
464
+ class << EnforceSingleRowNode =
465
+ Base.new(:id, :source)
466
+ def decode(hash)
467
+ unless hash.is_a?(Hash)
468
+ raise TypeError, "Can't convert #{hash.class} to Hash"
469
+ end
470
+ obj = allocate
471
+ obj.send(:initialize_struct,
472
+ hash["id"],
473
+ hash["source"] && PlanNode.decode(hash["source"]),
474
+ )
475
+ obj
476
+ end
477
+ end
478
+
479
+ class << ErrorCode =
480
+ Base.new(:code, :name)
481
+ def decode(hash)
482
+ unless hash.is_a?(Hash)
483
+ raise TypeError, "Can't convert #{hash.class} to Hash"
484
+ end
485
+ obj = allocate
486
+ obj.send(:initialize_struct,
487
+ hash["code"],
488
+ hash["name"],
489
+ )
490
+ obj
491
+ end
492
+ end
493
+
494
+ class << ErrorLocation =
495
+ Base.new(:line_number, :column_number)
496
+ def decode(hash)
497
+ unless hash.is_a?(Hash)
498
+ raise TypeError, "Can't convert #{hash.class} to Hash"
499
+ end
500
+ obj = allocate
501
+ obj.send(:initialize_struct,
502
+ hash["lineNumber"],
503
+ hash["columnNumber"],
504
+ )
505
+ obj
506
+ end
507
+ end
508
+
509
+ class << ExchangeNode =
510
+ Base.new(:id, :type, :scope, :partitioning_scheme, :sources, :inputs)
511
+ def decode(hash)
512
+ unless hash.is_a?(Hash)
513
+ raise TypeError, "Can't convert #{hash.class} to Hash"
514
+ end
515
+ obj = allocate
516
+ obj.send(:initialize_struct,
517
+ hash["id"],
518
+ hash["type"],
519
+ hash["scope"] && hash["scope"].downcase.to_sym,
520
+ hash["partitioningScheme"] && PartitioningScheme.decode(hash["partitioningScheme"]),
521
+ hash["sources"] && hash["sources"].map {|h| PlanNode.decode(h) },
522
+ hash["inputs"],
523
+ )
524
+ obj
525
+ end
526
+ end
527
+
528
+ class << ExecutionFailureInfo =
529
+ Base.new(:type, :message, :cause, :suppressed, :stack, :error_location, :error_code)
530
+ def decode(hash)
531
+ unless hash.is_a?(Hash)
532
+ raise TypeError, "Can't convert #{hash.class} to Hash"
533
+ end
534
+ obj = allocate
535
+ obj.send(:initialize_struct,
536
+ hash["type"],
537
+ hash["message"],
538
+ hash["cause"] && ExecutionFailureInfo.decode(hash["cause"]),
539
+ hash["suppressed"] && hash["suppressed"].map {|h| ExecutionFailureInfo.decode(h) },
540
+ hash["stack"],
541
+ hash["errorLocation"] && ErrorLocation.decode(hash["errorLocation"]),
542
+ hash["errorCode"] && ErrorCode.decode(hash["errorCode"]),
543
+ )
544
+ obj
545
+ end
546
+ end
547
+
548
+ class << ExplainAnalyzeNode =
549
+ Base.new(:id, :source, :output_symbol)
550
+ def decode(hash)
551
+ unless hash.is_a?(Hash)
552
+ raise TypeError, "Can't convert #{hash.class} to Hash"
553
+ end
554
+ obj = allocate
555
+ obj.send(:initialize_struct,
556
+ hash["id"],
557
+ hash["source"] && PlanNode.decode(hash["source"]),
558
+ hash["outputSymbol"],
559
+ )
560
+ obj
561
+ end
562
+ end
563
+
564
+ class << FailureInfo =
565
+ Base.new(:type, :message, :cause, :suppressed, :stack, :error_location)
566
+ def decode(hash)
567
+ unless hash.is_a?(Hash)
568
+ raise TypeError, "Can't convert #{hash.class} to Hash"
569
+ end
570
+ obj = allocate
571
+ obj.send(:initialize_struct,
572
+ hash["type"],
573
+ hash["message"],
574
+ hash["cause"] && FailureInfo.decode(hash["cause"]),
575
+ hash["suppressed"] && hash["suppressed"].map {|h| FailureInfo.decode(h) },
576
+ hash["stack"],
577
+ hash["errorLocation"] && ErrorLocation.decode(hash["errorLocation"]),
578
+ )
579
+ obj
580
+ end
581
+ end
582
+
583
+ class << FilterNode =
584
+ Base.new(:id, :source, :predicate)
585
+ def decode(hash)
586
+ unless hash.is_a?(Hash)
587
+ raise TypeError, "Can't convert #{hash.class} to Hash"
588
+ end
589
+ obj = allocate
590
+ obj.send(:initialize_struct,
591
+ hash["id"],
592
+ hash["source"] && PlanNode.decode(hash["source"]),
593
+ hash["predicate"],
594
+ )
595
+ obj
596
+ end
597
+ end
598
+
599
+ class << GroupIdNode =
600
+ Base.new(:id, :source, :grouping_sets, :identity_mappings, :group_id_symbol)
601
+ def decode(hash)
602
+ unless hash.is_a?(Hash)
603
+ raise TypeError, "Can't convert #{hash.class} to Hash"
604
+ end
605
+ obj = allocate
606
+ obj.send(:initialize_struct,
607
+ hash["id"],
608
+ hash["source"] && PlanNode.decode(hash["source"]),
609
+ hash["groupingSets"],
610
+ hash["identityMappings"],
611
+ hash["groupIdSymbol"],
612
+ )
613
+ obj
614
+ end
615
+ end
616
+
617
+ class << IndexHandle =
618
+ Base.new(:connector_id, :transaction_handle, :connector_handle)
619
+ def decode(hash)
620
+ unless hash.is_a?(Hash)
621
+ raise TypeError, "Can't convert #{hash.class} to Hash"
622
+ end
623
+ obj = allocate
624
+ obj.send(:initialize_struct,
625
+ hash["connectorId"],
626
+ hash["transactionHandle"],
627
+ hash["connectorHandle"],
628
+ )
629
+ obj
630
+ end
631
+ end
632
+
633
+ class << IndexJoinNode =
634
+ Base.new(:id, :type, :probe_source, :index_source, :criteria, :probe_hash_symbol, :index_hash_symbol)
635
+ def decode(hash)
636
+ unless hash.is_a?(Hash)
637
+ raise TypeError, "Can't convert #{hash.class} to Hash"
638
+ end
639
+ obj = allocate
640
+ obj.send(:initialize_struct,
641
+ hash["id"],
642
+ hash["type"],
643
+ hash["probeSource"] && PlanNode.decode(hash["probeSource"]),
644
+ hash["indexSource"] && PlanNode.decode(hash["indexSource"]),
645
+ hash["criteria"] && hash["criteria"].map {|h| EquiJoinClause.decode(h) },
646
+ hash["probeHashSymbol"],
647
+ hash["indexHashSymbol"],
648
+ )
649
+ obj
650
+ end
651
+ end
652
+
653
+ class << IndexSourceNode =
654
+ Base.new(:id, :index_handle, :table_handle, :lookup_symbols, :output_symbols, :assignments, :effective_tuple_domain)
655
+ def decode(hash)
656
+ unless hash.is_a?(Hash)
657
+ raise TypeError, "Can't convert #{hash.class} to Hash"
658
+ end
659
+ obj = allocate
660
+ obj.send(:initialize_struct,
661
+ hash["id"],
662
+ hash["indexHandle"] && IndexHandle.decode(hash["indexHandle"]),
663
+ hash["tableHandle"] && TableHandle.decode(hash["tableHandle"]),
664
+ hash["lookupSymbols"],
665
+ hash["outputSymbols"],
666
+ hash["assignments"],
667
+ hash["effectiveTupleDomain"],
668
+ )
669
+ obj
670
+ end
671
+ end
672
+
673
+ class << Input =
674
+ Base.new(:connector_id, :schema, :table, :columns)
675
+ def decode(hash)
676
+ unless hash.is_a?(Hash)
677
+ raise TypeError, "Can't convert #{hash.class} to Hash"
678
+ end
679
+ obj = allocate
680
+ obj.send(:initialize_struct,
681
+ hash["connectorId"],
682
+ hash["schema"],
683
+ hash["table"],
684
+ hash["columns"] && hash["columns"].map {|h| Column.decode(h) },
685
+ )
686
+ obj
687
+ end
688
+ end
689
+
690
+ class << InsertTableHandle =
691
+ Base.new(:connector_id, :transaction_handle, :connector_handle)
692
+ def decode(hash)
693
+ unless hash.is_a?(Hash)
694
+ raise TypeError, "Can't convert #{hash.class} to Hash"
695
+ end
696
+ obj = allocate
697
+ obj.send(:initialize_struct,
698
+ hash["connectorId"],
699
+ hash["transactionHandle"],
700
+ hash["connectorHandle"],
701
+ )
702
+ obj
703
+ end
704
+ end
705
+
706
+ class << JoinNode =
707
+ Base.new(:id, :type, :left, :right, :criteria, :filter, :left_hash_symbol, :right_hash_symbol)
708
+ def decode(hash)
709
+ unless hash.is_a?(Hash)
710
+ raise TypeError, "Can't convert #{hash.class} to Hash"
711
+ end
712
+ obj = allocate
713
+ obj.send(:initialize_struct,
714
+ hash["id"],
715
+ hash["type"],
716
+ hash["left"] && PlanNode.decode(hash["left"]),
717
+ hash["right"] && PlanNode.decode(hash["right"]),
718
+ hash["criteria"] && hash["criteria"].map {|h| EquiJoinClause.decode(h) },
719
+ hash["filter"],
720
+ hash["leftHashSymbol"],
721
+ hash["rightHashSymbol"],
722
+ )
723
+ obj
724
+ end
725
+ end
726
+
727
+ class << LimitNode =
728
+ Base.new(:id, :source, :count, :partial)
729
+ def decode(hash)
730
+ unless hash.is_a?(Hash)
731
+ raise TypeError, "Can't convert #{hash.class} to Hash"
732
+ end
733
+ obj = allocate
734
+ obj.send(:initialize_struct,
735
+ hash["id"],
736
+ hash["source"] && PlanNode.decode(hash["source"]),
737
+ hash["count"],
738
+ hash["partial"],
739
+ )
740
+ obj
741
+ end
742
+ end
743
+
744
+ class << LongVariableConstraint =
745
+ Base.new(:name, :expression)
746
+ def decode(hash)
747
+ unless hash.is_a?(Hash)
748
+ raise TypeError, "Can't convert #{hash.class} to Hash"
749
+ end
750
+ obj = allocate
751
+ obj.send(:initialize_struct,
752
+ hash["name"],
753
+ hash["expression"],
754
+ )
755
+ obj
756
+ end
757
+ end
758
+
759
+ class << MarkDistinctNode =
760
+ Base.new(:id, :source, :marker_symbol, :distinct_symbols, :hash_symbol)
761
+ def decode(hash)
762
+ unless hash.is_a?(Hash)
763
+ raise TypeError, "Can't convert #{hash.class} to Hash"
764
+ end
765
+ obj = allocate
766
+ obj.send(:initialize_struct,
767
+ hash["id"],
768
+ hash["source"] && PlanNode.decode(hash["source"]),
769
+ hash["markerSymbol"],
770
+ hash["distinctSymbols"],
771
+ hash["hashSymbol"],
772
+ )
773
+ obj
774
+ end
775
+ end
776
+
777
+ class << MetadataDeleteNode =
778
+ Base.new(:id, :target, :output, :table_layout)
779
+ def decode(hash)
780
+ unless hash.is_a?(Hash)
781
+ raise TypeError, "Can't convert #{hash.class} to Hash"
782
+ end
783
+ obj = allocate
784
+ obj.send(:initialize_struct,
785
+ hash["id"],
786
+ hash["target"] && DeleteHandle.decode(hash["target"]),
787
+ hash["output"],
788
+ hash["tableLayout"] && TableLayoutHandle.decode(hash["tableLayout"]),
789
+ )
790
+ obj
791
+ end
792
+ end
793
+
794
+ class << OperatorStats =
795
+ Base.new(:operator_id, :plan_node_id, :operator_type, :add_input_calls, :add_input_wall, :add_input_cpu, :add_input_user, :input_data_size, :input_positions, :get_output_calls, :get_output_wall, :get_output_cpu, :get_output_user, :output_data_size, :output_positions, :blocked_wall, :finish_calls, :finish_wall, :finish_cpu, :finish_user, :memory_reservation, :system_memory_reservation, :blocked_reason, :info)
796
+ def decode(hash)
797
+ unless hash.is_a?(Hash)
798
+ raise TypeError, "Can't convert #{hash.class} to Hash"
799
+ end
800
+ obj = allocate
801
+ obj.send(:initialize_struct,
802
+ hash["operatorId"],
803
+ hash["planNodeId"],
804
+ hash["operatorType"],
805
+ hash["addInputCalls"],
806
+ hash["addInputWall"],
807
+ hash["addInputCpu"],
808
+ hash["addInputUser"],
809
+ hash["inputDataSize"],
810
+ hash["inputPositions"],
811
+ hash["getOutputCalls"],
812
+ hash["getOutputWall"],
813
+ hash["getOutputCpu"],
814
+ hash["getOutputUser"],
815
+ hash["outputDataSize"],
816
+ hash["outputPositions"],
817
+ hash["blockedWall"],
818
+ hash["finishCalls"],
819
+ hash["finishWall"],
820
+ hash["finishCpu"],
821
+ hash["finishUser"],
822
+ hash["memoryReservation"],
823
+ hash["systemMemoryReservation"],
824
+ hash["blockedReason"] && hash["blockedReason"].downcase.to_sym,
825
+ hash["info"],
826
+ )
827
+ obj
828
+ end
829
+ end
830
+
831
+ class << OutputNode =
832
+ Base.new(:id, :source, :columns, :outputs)
833
+ def decode(hash)
834
+ unless hash.is_a?(Hash)
835
+ raise TypeError, "Can't convert #{hash.class} to Hash"
836
+ end
837
+ obj = allocate
838
+ obj.send(:initialize_struct,
839
+ hash["id"],
840
+ hash["source"] && PlanNode.decode(hash["source"]),
841
+ hash["columns"],
842
+ hash["outputs"],
843
+ )
844
+ obj
845
+ end
846
+ end
847
+
848
+ class << OutputTableHandle =
849
+ Base.new(:connector_id, :transaction_handle, :connector_handle)
850
+ def decode(hash)
851
+ unless hash.is_a?(Hash)
852
+ raise TypeError, "Can't convert #{hash.class} to Hash"
853
+ end
854
+ obj = allocate
855
+ obj.send(:initialize_struct,
856
+ hash["connectorId"],
857
+ hash["transactionHandle"],
858
+ hash["connectorHandle"],
859
+ )
860
+ obj
861
+ end
862
+ end
863
+
864
+ class << PageBufferInfo =
865
+ Base.new(:partition, :buffered_pages, :buffered_bytes, :rows_added, :pages_added)
866
+ def decode(hash)
867
+ unless hash.is_a?(Hash)
868
+ raise TypeError, "Can't convert #{hash.class} to Hash"
869
+ end
870
+ obj = allocate
871
+ obj.send(:initialize_struct,
872
+ hash["partition"],
873
+ hash["bufferedPages"],
874
+ hash["bufferedBytes"],
875
+ hash["rowsAdded"],
876
+ hash["pagesAdded"],
877
+ )
878
+ obj
879
+ end
880
+ end
881
+
882
+ class << Partitioning =
883
+ Base.new(:handle, :arguments)
884
+ def decode(hash)
885
+ unless hash.is_a?(Hash)
886
+ raise TypeError, "Can't convert #{hash.class} to Hash"
887
+ end
888
+ obj = allocate
889
+ obj.send(:initialize_struct,
890
+ hash["handle"] && PartitioningHandle.decode(hash["handle"]),
891
+ hash["arguments"] && hash["arguments"].map {|h| ArgumentBinding.decode(h) },
892
+ )
893
+ obj
894
+ end
895
+ end
896
+
897
+ class << PartitioningHandle =
898
+ Base.new(:connector_id, :transaction_handle, :connector_handle)
899
+ def decode(hash)
900
+ unless hash.is_a?(Hash)
901
+ raise TypeError, "Can't convert #{hash.class} to Hash"
902
+ end
903
+ obj = allocate
904
+ obj.send(:initialize_struct,
905
+ hash["connectorId"],
906
+ hash["transactionHandle"],
907
+ hash["connectorHandle"],
908
+ )
909
+ obj
910
+ end
911
+ end
912
+
913
+ class << PartitioningScheme =
914
+ Base.new(:partitioning, :output_layout, :hash_column, :replicate_nulls, :bucket_to_partition)
915
+ def decode(hash)
916
+ unless hash.is_a?(Hash)
917
+ raise TypeError, "Can't convert #{hash.class} to Hash"
918
+ end
919
+ obj = allocate
920
+ obj.send(:initialize_struct,
921
+ hash["partitioning"] && Partitioning.decode(hash["partitioning"]),
922
+ hash["outputLayout"],
923
+ hash["hashColumn"],
924
+ hash["replicateNulls"],
925
+ hash["bucketToPartition"],
926
+ )
927
+ obj
928
+ end
929
+ end
930
+
931
+ class << PipelineStats =
932
+ Base.new(:first_start_time, :last_start_time, :last_end_time, :input_pipeline, :output_pipeline, :total_drivers, :queued_drivers, :queued_partitioned_drivers, :running_drivers, :running_partitioned_drivers, :completed_drivers, :memory_reservation, :system_memory_reservation, :queued_time, :elapsed_time, :total_scheduled_time, :total_cpu_time, :total_user_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions, :operator_summaries, :drivers)
933
+ def decode(hash)
934
+ unless hash.is_a?(Hash)
935
+ raise TypeError, "Can't convert #{hash.class} to Hash"
936
+ end
937
+ obj = allocate
938
+ obj.send(:initialize_struct,
939
+ hash["firstStartTime"],
940
+ hash["lastStartTime"],
941
+ hash["lastEndTime"],
942
+ hash["inputPipeline"],
943
+ hash["outputPipeline"],
944
+ hash["totalDrivers"],
945
+ hash["queuedDrivers"],
946
+ hash["queuedPartitionedDrivers"],
947
+ hash["runningDrivers"],
948
+ hash["runningPartitionedDrivers"],
949
+ hash["completedDrivers"],
950
+ hash["memoryReservation"],
951
+ hash["systemMemoryReservation"],
952
+ hash["queuedTime"] && DistributionSnapshot.decode(hash["queuedTime"]),
953
+ hash["elapsedTime"] && DistributionSnapshot.decode(hash["elapsedTime"]),
954
+ hash["totalScheduledTime"],
955
+ hash["totalCpuTime"],
956
+ hash["totalUserTime"],
957
+ hash["totalBlockedTime"],
958
+ hash["fullyBlocked"],
959
+ hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym },
960
+ hash["rawInputDataSize"],
961
+ hash["rawInputPositions"],
962
+ hash["processedInputDataSize"],
963
+ hash["processedInputPositions"],
964
+ hash["outputDataSize"],
965
+ hash["outputPositions"],
966
+ hash["operatorSummaries"] && hash["operatorSummaries"].map {|h| OperatorStats.decode(h) },
967
+ hash["drivers"] && hash["drivers"].map {|h| DriverStats.decode(h) },
968
+ )
969
+ obj
970
+ end
971
+ end
972
+
973
+ class << PlanFragment =
974
+ Base.new(:id, :root, :symbols, :partitioning, :partitioned_sources, :partitioning_scheme)
975
+ def decode(hash)
976
+ unless hash.is_a?(Hash)
977
+ raise TypeError, "Can't convert #{hash.class} to Hash"
978
+ end
979
+ obj = allocate
980
+ obj.send(:initialize_struct,
981
+ hash["id"],
982
+ hash["root"] && PlanNode.decode(hash["root"]),
983
+ hash["symbols"],
984
+ hash["partitioning"] && PartitioningHandle.decode(hash["partitioning"]),
985
+ hash["partitionedSources"],
986
+ hash["partitioningScheme"] && PartitioningScheme.decode(hash["partitioningScheme"]),
987
+ )
988
+ obj
989
+ end
990
+ end
991
+
992
+ class << ProjectNode =
993
+ Base.new(:id, :source, :assignments)
994
+ def decode(hash)
995
+ unless hash.is_a?(Hash)
996
+ raise TypeError, "Can't convert #{hash.class} to Hash"
997
+ end
998
+ obj = allocate
999
+ obj.send(:initialize_struct,
1000
+ hash["id"],
1001
+ hash["source"] && PlanNode.decode(hash["source"]),
1002
+ hash["assignments"],
1003
+ )
1004
+ obj
1005
+ end
1006
+ end
1007
+
1008
+ class << QueryError =
1009
+ Base.new(:message, :sql_state, :error_code, :error_name, :error_type, :error_location, :failure_info)
1010
+ def decode(hash)
1011
+ unless hash.is_a?(Hash)
1012
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1013
+ end
1014
+ obj = allocate
1015
+ obj.send(:initialize_struct,
1016
+ hash["message"],
1017
+ hash["sqlState"],
1018
+ hash["errorCode"],
1019
+ hash["errorName"],
1020
+ hash["errorType"],
1021
+ hash["errorLocation"] && ErrorLocation.decode(hash["errorLocation"]),
1022
+ hash["failureInfo"] && FailureInfo.decode(hash["failureInfo"]),
1023
+ )
1024
+ obj
1025
+ end
1026
+ end
1027
+
1028
+ class << QueryInfo =
1029
+ Base.new(:query_id, :session, :state, :memory_pool, :scheduled, :self, :field_names, :query, :query_stats, :set_session_properties, :reset_session_properties, :added_prepared_statements, :deallocated_prepared_statements, :started_transaction_id, :clear_transaction_id, :update_type, :output_stage, :failure_info, :error_code, :inputs)
1030
+ def decode(hash)
1031
+ unless hash.is_a?(Hash)
1032
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1033
+ end
1034
+ obj = allocate
1035
+ obj.send(:initialize_struct,
1036
+ hash["queryId"],
1037
+ hash["session"] && SessionRepresentation.decode(hash["session"]),
1038
+ hash["state"] && hash["state"].downcase.to_sym,
1039
+ hash["memoryPool"],
1040
+ hash["scheduled"],
1041
+ hash["self"],
1042
+ hash["fieldNames"],
1043
+ hash["query"],
1044
+ hash["queryStats"] && QueryStats.decode(hash["queryStats"]),
1045
+ hash["setSessionProperties"],
1046
+ hash["resetSessionProperties"],
1047
+ hash["addedPreparedStatements"],
1048
+ hash["deallocatedPreparedStatements"],
1049
+ hash["startedTransactionId"],
1050
+ hash["clearTransactionId"],
1051
+ hash["updateType"],
1052
+ hash["outputStage"] && StageInfo.decode(hash["outputStage"]),
1053
+ hash["failureInfo"] && FailureInfo.decode(hash["failureInfo"]),
1054
+ hash["errorCode"] && ErrorCode.decode(hash["errorCode"]),
1055
+ hash["inputs"] && hash["inputs"].map {|h| Input.decode(h) },
1056
+ )
1057
+ obj
1058
+ end
1059
+ end
1060
+
1061
+ class << QueryResults =
1062
+ Base.new(:id, :info_uri, :partial_cancel_uri, :next_uri, :columns, :data, :stats, :error, :update_type, :update_count)
1063
+ def decode(hash)
1064
+ unless hash.is_a?(Hash)
1065
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1066
+ end
1067
+ obj = allocate
1068
+ obj.send(:initialize_struct,
1069
+ hash["id"],
1070
+ hash["infoUri"],
1071
+ hash["partialCancelUri"],
1072
+ hash["nextUri"],
1073
+ hash["columns"] && hash["columns"].map {|h| ClientColumn.decode(h) },
1074
+ hash["data"],
1075
+ hash["stats"] && StatementStats.decode(hash["stats"]),
1076
+ hash["error"] && QueryError.decode(hash["error"]),
1077
+ hash["updateType"],
1078
+ hash["updateCount"],
1079
+ )
1080
+ obj
1081
+ end
1082
+ end
1083
+
1084
+ class << QueryStats =
1085
+ Base.new(:create_time, :execution_start_time, :last_heartbeat, :end_time, :elapsed_time, :queued_time, :analysis_time, :distributed_planning_time, :total_planning_time, :finishing_time, :total_tasks, :running_tasks, :completed_tasks, :total_drivers, :queued_drivers, :running_drivers, :completed_drivers, :cumulative_memory, :total_memory_reservation, :peak_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_user_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions)
1086
+ def decode(hash)
1087
+ unless hash.is_a?(Hash)
1088
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1089
+ end
1090
+ obj = allocate
1091
+ obj.send(:initialize_struct,
1092
+ hash["createTime"],
1093
+ hash["executionStartTime"],
1094
+ hash["lastHeartbeat"],
1095
+ hash["endTime"],
1096
+ hash["elapsedTime"],
1097
+ hash["queuedTime"],
1098
+ hash["analysisTime"],
1099
+ hash["distributedPlanningTime"],
1100
+ hash["totalPlanningTime"],
1101
+ hash["finishingTime"],
1102
+ hash["totalTasks"],
1103
+ hash["runningTasks"],
1104
+ hash["completedTasks"],
1105
+ hash["totalDrivers"],
1106
+ hash["queuedDrivers"],
1107
+ hash["runningDrivers"],
1108
+ hash["completedDrivers"],
1109
+ hash["cumulativeMemory"],
1110
+ hash["totalMemoryReservation"],
1111
+ hash["peakMemoryReservation"],
1112
+ hash["totalScheduledTime"],
1113
+ hash["totalCpuTime"],
1114
+ hash["totalUserTime"],
1115
+ hash["totalBlockedTime"],
1116
+ hash["fullyBlocked"],
1117
+ hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym },
1118
+ hash["rawInputDataSize"],
1119
+ hash["rawInputPositions"],
1120
+ hash["processedInputDataSize"],
1121
+ hash["processedInputPositions"],
1122
+ hash["outputDataSize"],
1123
+ hash["outputPositions"],
1124
+ )
1125
+ obj
1126
+ end
1127
+ end
1128
+
1129
+ class << RemoteSourceNode =
1130
+ Base.new(:id, :source_fragment_ids, :outputs)
1131
+ def decode(hash)
1132
+ unless hash.is_a?(Hash)
1133
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1134
+ end
1135
+ obj = allocate
1136
+ obj.send(:initialize_struct,
1137
+ hash["id"],
1138
+ hash["sourceFragmentIds"],
1139
+ hash["outputs"],
1140
+ )
1141
+ obj
1142
+ end
1143
+ end
1144
+
1145
+ class << RowNumberNode =
1146
+ Base.new(:id, :source, :partition_by, :row_number_symbol, :max_row_count_per_partition, :hash_symbol)
1147
+ def decode(hash)
1148
+ unless hash.is_a?(Hash)
1149
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1150
+ end
1151
+ obj = allocate
1152
+ obj.send(:initialize_struct,
1153
+ hash["id"],
1154
+ hash["source"] && PlanNode.decode(hash["source"]),
1155
+ hash["partitionBy"],
1156
+ hash["rowNumberSymbol"],
1157
+ hash["maxRowCountPerPartition"],
1158
+ hash["hashSymbol"],
1159
+ )
1160
+ obj
1161
+ end
1162
+ end
1163
+
1164
+ class << SampleNode =
1165
+ Base.new(:id, :source, :sample_ratio, :sample_type, :rescaled, :sample_weight_symbol)
1166
+ def decode(hash)
1167
+ unless hash.is_a?(Hash)
1168
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1169
+ end
1170
+ obj = allocate
1171
+ obj.send(:initialize_struct,
1172
+ hash["id"],
1173
+ hash["source"] && PlanNode.decode(hash["source"]),
1174
+ hash["sampleRatio"],
1175
+ hash["sampleType"],
1176
+ hash["rescaled"],
1177
+ hash["sampleWeightSymbol"],
1178
+ )
1179
+ obj
1180
+ end
1181
+ end
1182
+
1183
+ class << SemiJoinNode =
1184
+ Base.new(:id, :source, :filtering_source, :source_join_symbol, :filtering_source_join_symbol, :semi_join_output, :source_hash_symbol, :filtering_source_hash_symbol)
1185
+ def decode(hash)
1186
+ unless hash.is_a?(Hash)
1187
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1188
+ end
1189
+ obj = allocate
1190
+ obj.send(:initialize_struct,
1191
+ hash["id"],
1192
+ hash["source"] && PlanNode.decode(hash["source"]),
1193
+ hash["filteringSource"] && PlanNode.decode(hash["filteringSource"]),
1194
+ hash["sourceJoinSymbol"],
1195
+ hash["filteringSourceJoinSymbol"],
1196
+ hash["semiJoinOutput"],
1197
+ hash["sourceHashSymbol"],
1198
+ hash["filteringSourceHashSymbol"],
1199
+ )
1200
+ obj
1201
+ end
1202
+ end
1203
+
1204
+ class << SessionRepresentation =
1205
+ Base.new(:query_id, :transaction_id, :client_transaction_support, :user, :principal, :source, :catalog, :schema, :time_zone_key, :locale, :remote_user_address, :user_agent, :start_time, :system_properties, :catalog_properties, :prepared_statements)
1206
+ def decode(hash)
1207
+ unless hash.is_a?(Hash)
1208
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1209
+ end
1210
+ obj = allocate
1211
+ obj.send(:initialize_struct,
1212
+ hash["queryId"],
1213
+ hash["transactionId"],
1214
+ hash["clientTransactionSupport"],
1215
+ hash["user"],
1216
+ hash["principal"],
1217
+ hash["source"],
1218
+ hash["catalog"],
1219
+ hash["schema"],
1220
+ hash["timeZoneKey"],
1221
+ hash["locale"],
1222
+ hash["remoteUserAddress"],
1223
+ hash["userAgent"],
1224
+ hash["startTime"],
1225
+ hash["systemProperties"],
1226
+ hash["catalogProperties"],
1227
+ hash["preparedStatements"],
1228
+ )
1229
+ obj
1230
+ end
1231
+ end
1232
+
1233
+ class << SharedBufferInfo =
1234
+ Base.new(:state, :can_add_buffers, :can_add_pages, :total_buffered_bytes, :total_buffered_pages, :total_rows_sent, :total_pages_sent, :buffers)
1235
+ def decode(hash)
1236
+ unless hash.is_a?(Hash)
1237
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1238
+ end
1239
+ obj = allocate
1240
+ obj.send(:initialize_struct,
1241
+ hash["state"] && hash["state"].downcase.to_sym,
1242
+ hash["canAddBuffers"],
1243
+ hash["canAddPages"],
1244
+ hash["totalBufferedBytes"],
1245
+ hash["totalBufferedPages"],
1246
+ hash["totalRowsSent"],
1247
+ hash["totalPagesSent"],
1248
+ hash["buffers"] && hash["buffers"].map {|h| BufferInfo.decode(h) },
1249
+ )
1250
+ obj
1251
+ end
1252
+ end
1253
+
1254
+ class << Signature =
1255
+ Base.new(:name, :kind, :type_variable_constraints, :long_variable_constraints, :return_type, :argument_types, :variable_arity)
1256
+ def decode(hash)
1257
+ unless hash.is_a?(Hash)
1258
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1259
+ end
1260
+ obj = allocate
1261
+ obj.send(:initialize_struct,
1262
+ hash["name"],
1263
+ hash["kind"] && hash["kind"].downcase.to_sym,
1264
+ hash["typeVariableConstraints"] && hash["typeVariableConstraints"].map {|h| TypeVariableConstraint.decode(h) },
1265
+ hash["longVariableConstraints"] && hash["longVariableConstraints"].map {|h| LongVariableConstraint.decode(h) },
1266
+ hash["returnType"],
1267
+ hash["argumentTypes"],
1268
+ hash["variableArity"],
1269
+ )
1270
+ obj
1271
+ end
1272
+ end
1273
+
1274
+ class << SortNode =
1275
+ Base.new(:id, :source, :order_by, :orderings)
1276
+ def decode(hash)
1277
+ unless hash.is_a?(Hash)
1278
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1279
+ end
1280
+ obj = allocate
1281
+ obj.send(:initialize_struct,
1282
+ hash["id"],
1283
+ hash["source"] && PlanNode.decode(hash["source"]),
1284
+ hash["orderBy"],
1285
+ hash["orderings"] && Hash[hash["orderings"].to_a.map! {|k,v| [k, v.downcase.to_sym] }],
1286
+ )
1287
+ obj
1288
+ end
1289
+ end
1290
+
1291
+ class << StageInfo =
1292
+ Base.new(:stage_id, :state, :self, :plan, :types, :stage_stats, :tasks, :sub_stages, :failure_cause)
1293
+ def decode(hash)
1294
+ unless hash.is_a?(Hash)
1295
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1296
+ end
1297
+ obj = allocate
1298
+ obj.send(:initialize_struct,
1299
+ hash["stageId"] && StageId.new(hash["stageId"]),
1300
+ hash["state"] && hash["state"].downcase.to_sym,
1301
+ hash["self"],
1302
+ hash["plan"] && PlanFragment.decode(hash["plan"]),
1303
+ hash["types"],
1304
+ hash["stageStats"] && StageStats.decode(hash["stageStats"]),
1305
+ hash["tasks"] && hash["tasks"].map {|h| TaskInfo.decode(h) },
1306
+ hash["subStages"] && hash["subStages"].map {|h| StageInfo.decode(h) },
1307
+ hash["failureCause"] && ExecutionFailureInfo.decode(hash["failureCause"]),
1308
+ )
1309
+ obj
1310
+ end
1311
+ end
1312
+
1313
+ class << StageStats =
1314
+ Base.new(:scheduling_complete, :get_split_distribution, :schedule_task_distribution, :add_split_distribution, :total_tasks, :running_tasks, :completed_tasks, :total_drivers, :queued_drivers, :running_drivers, :completed_drivers, :cumulative_memory, :total_memory_reservation, :peak_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_user_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions)
1315
+ def decode(hash)
1316
+ unless hash.is_a?(Hash)
1317
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1318
+ end
1319
+ obj = allocate
1320
+ obj.send(:initialize_struct,
1321
+ hash["schedulingComplete"],
1322
+ hash["getSplitDistribution"] && DistributionSnapshot.decode(hash["getSplitDistribution"]),
1323
+ hash["scheduleTaskDistribution"] && DistributionSnapshot.decode(hash["scheduleTaskDistribution"]),
1324
+ hash["addSplitDistribution"] && DistributionSnapshot.decode(hash["addSplitDistribution"]),
1325
+ hash["totalTasks"],
1326
+ hash["runningTasks"],
1327
+ hash["completedTasks"],
1328
+ hash["totalDrivers"],
1329
+ hash["queuedDrivers"],
1330
+ hash["runningDrivers"],
1331
+ hash["completedDrivers"],
1332
+ hash["cumulativeMemory"],
1333
+ hash["totalMemoryReservation"],
1334
+ hash["peakMemoryReservation"],
1335
+ hash["totalScheduledTime"],
1336
+ hash["totalCpuTime"],
1337
+ hash["totalUserTime"],
1338
+ hash["totalBlockedTime"],
1339
+ hash["fullyBlocked"],
1340
+ hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym },
1341
+ hash["rawInputDataSize"],
1342
+ hash["rawInputPositions"],
1343
+ hash["processedInputDataSize"],
1344
+ hash["processedInputPositions"],
1345
+ hash["outputDataSize"],
1346
+ hash["outputPositions"],
1347
+ )
1348
+ obj
1349
+ end
1350
+ end
1351
+
1352
+ class << StatementStats =
1353
+ Base.new(:state, :queued, :scheduled, :nodes, :total_splits, :queued_splits, :running_splits, :completed_splits, :user_time_millis, :cpu_time_millis, :wall_time_millis, :processed_rows, :processed_bytes, :root_stage)
1354
+ def decode(hash)
1355
+ unless hash.is_a?(Hash)
1356
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1357
+ end
1358
+ obj = allocate
1359
+ obj.send(:initialize_struct,
1360
+ hash["state"],
1361
+ hash["queued"],
1362
+ hash["scheduled"],
1363
+ hash["nodes"],
1364
+ hash["totalSplits"],
1365
+ hash["queuedSplits"],
1366
+ hash["runningSplits"],
1367
+ hash["completedSplits"],
1368
+ hash["userTimeMillis"],
1369
+ hash["cpuTimeMillis"],
1370
+ hash["wallTimeMillis"],
1371
+ hash["processedRows"],
1372
+ hash["processedBytes"],
1373
+ hash["rootStage"] && ClientStageStats.decode(hash["rootStage"]),
1374
+ )
1375
+ obj
1376
+ end
1377
+ end
1378
+
1379
+ class << TableFinishNode =
1380
+ Base.new(:id, :source, :target, :outputs)
1381
+ def decode(hash)
1382
+ unless hash.is_a?(Hash)
1383
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1384
+ end
1385
+ obj = allocate
1386
+ obj.send(:initialize_struct,
1387
+ hash["id"],
1388
+ hash["source"] && PlanNode.decode(hash["source"]),
1389
+ hash["target"] && WriterTarget.decode(hash["target"]),
1390
+ hash["outputs"],
1391
+ )
1392
+ obj
1393
+ end
1394
+ end
1395
+
1396
+ class << TableHandle =
1397
+ Base.new(:connector_id, :connector_handle)
1398
+ def decode(hash)
1399
+ unless hash.is_a?(Hash)
1400
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1401
+ end
1402
+ obj = allocate
1403
+ obj.send(:initialize_struct,
1404
+ hash["connectorId"],
1405
+ hash["connectorHandle"],
1406
+ )
1407
+ obj
1408
+ end
1409
+ end
1410
+
1411
+ class << TableLayoutHandle =
1412
+ Base.new(:connector_id, :transaction_handle, :connector_handle)
1413
+ def decode(hash)
1414
+ unless hash.is_a?(Hash)
1415
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1416
+ end
1417
+ obj = allocate
1418
+ obj.send(:initialize_struct,
1419
+ hash["connectorId"],
1420
+ hash["transactionHandle"],
1421
+ hash["connectorHandle"],
1422
+ )
1423
+ obj
1424
+ end
1425
+ end
1426
+
1427
+ class << TableScanNode =
1428
+ Base.new(:id, :table, :output_symbols, :assignments, :layout, :current_constraint, :original_constraint)
1429
+ def decode(hash)
1430
+ unless hash.is_a?(Hash)
1431
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1432
+ end
1433
+ obj = allocate
1434
+ obj.send(:initialize_struct,
1435
+ hash["id"],
1436
+ hash["table"] && TableHandle.decode(hash["table"]),
1437
+ hash["outputSymbols"],
1438
+ hash["assignments"],
1439
+ hash["layout"] && TableLayoutHandle.decode(hash["layout"]),
1440
+ hash["currentConstraint"],
1441
+ hash["originalConstraint"],
1442
+ )
1443
+ obj
1444
+ end
1445
+ end
1446
+
1447
+ class << TableWriterNode =
1448
+ Base.new(:id, :source, :target, :columns, :column_names, :outputs, :sample_weight_symbol, :partitioning_scheme)
1449
+ def decode(hash)
1450
+ unless hash.is_a?(Hash)
1451
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1452
+ end
1453
+ obj = allocate
1454
+ obj.send(:initialize_struct,
1455
+ hash["id"],
1456
+ hash["source"] && PlanNode.decode(hash["source"]),
1457
+ hash["target"] && WriterTarget.decode(hash["target"]),
1458
+ hash["columns"],
1459
+ hash["columnNames"],
1460
+ hash["outputs"],
1461
+ hash["sampleWeightSymbol"],
1462
+ hash["partitioningScheme"] && PartitioningScheme.decode(hash["partitioningScheme"]),
1463
+ )
1464
+ obj
1465
+ end
1466
+ end
1467
+
1468
+ class << TaskInfo =
1469
+ Base.new(:task_status, :last_heartbeat, :output_buffers, :no_more_splits, :stats, :needs_plan)
1470
+ def decode(hash)
1471
+ unless hash.is_a?(Hash)
1472
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1473
+ end
1474
+ obj = allocate
1475
+ obj.send(:initialize_struct,
1476
+ hash["taskStatus"] && TaskStatus.decode(hash["taskStatus"]),
1477
+ hash["lastHeartbeat"],
1478
+ hash["outputBuffers"] && SharedBufferInfo.decode(hash["outputBuffers"]),
1479
+ hash["noMoreSplits"],
1480
+ hash["stats"] && TaskStats.decode(hash["stats"]),
1481
+ hash["needsPlan"],
1482
+ )
1483
+ obj
1484
+ end
1485
+ end
1486
+
1487
+ class << TaskStats =
1488
+ Base.new(:create_time, :first_start_time, :last_start_time, :last_end_time, :end_time, :elapsed_time, :queued_time, :total_drivers, :queued_drivers, :queued_partitioned_drivers, :running_drivers, :running_partitioned_drivers, :completed_drivers, :cumulative_memory, :memory_reservation, :system_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_user_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions, :pipelines)
1489
+ def decode(hash)
1490
+ unless hash.is_a?(Hash)
1491
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1492
+ end
1493
+ obj = allocate
1494
+ obj.send(:initialize_struct,
1495
+ hash["createTime"],
1496
+ hash["firstStartTime"],
1497
+ hash["lastStartTime"],
1498
+ hash["lastEndTime"],
1499
+ hash["endTime"],
1500
+ hash["elapsedTime"],
1501
+ hash["queuedTime"],
1502
+ hash["totalDrivers"],
1503
+ hash["queuedDrivers"],
1504
+ hash["queuedPartitionedDrivers"],
1505
+ hash["runningDrivers"],
1506
+ hash["runningPartitionedDrivers"],
1507
+ hash["completedDrivers"],
1508
+ hash["cumulativeMemory"],
1509
+ hash["memoryReservation"],
1510
+ hash["systemMemoryReservation"],
1511
+ hash["totalScheduledTime"],
1512
+ hash["totalCpuTime"],
1513
+ hash["totalUserTime"],
1514
+ hash["totalBlockedTime"],
1515
+ hash["fullyBlocked"],
1516
+ hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym },
1517
+ hash["rawInputDataSize"],
1518
+ hash["rawInputPositions"],
1519
+ hash["processedInputDataSize"],
1520
+ hash["processedInputPositions"],
1521
+ hash["outputDataSize"],
1522
+ hash["outputPositions"],
1523
+ hash["pipelines"] && hash["pipelines"].map {|h| PipelineStats.decode(h) },
1524
+ )
1525
+ obj
1526
+ end
1527
+ end
1528
+
1529
+ class << TaskStatus =
1530
+ Base.new(:task_id, :task_instance_id, :version, :state, :self, :failures, :queued_partitioned_drivers, :running_partitioned_drivers, :memory_reservation)
1531
+ def decode(hash)
1532
+ unless hash.is_a?(Hash)
1533
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1534
+ end
1535
+ obj = allocate
1536
+ obj.send(:initialize_struct,
1537
+ hash["taskId"] && TaskId.new(hash["taskId"]),
1538
+ hash["taskInstanceId"],
1539
+ hash["version"],
1540
+ hash["state"] && hash["state"].downcase.to_sym,
1541
+ hash["self"],
1542
+ hash["failures"] && hash["failures"].map {|h| ExecutionFailureInfo.decode(h) },
1543
+ hash["queuedPartitionedDrivers"],
1544
+ hash["runningPartitionedDrivers"],
1545
+ hash["memoryReservation"],
1546
+ )
1547
+ obj
1548
+ end
1549
+ end
1550
+
1551
+ class << TopNNode =
1552
+ Base.new(:id, :source, :count, :order_by, :orderings, :partial)
1553
+ def decode(hash)
1554
+ unless hash.is_a?(Hash)
1555
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1556
+ end
1557
+ obj = allocate
1558
+ obj.send(:initialize_struct,
1559
+ hash["id"],
1560
+ hash["source"] && PlanNode.decode(hash["source"]),
1561
+ hash["count"],
1562
+ hash["orderBy"],
1563
+ hash["orderings"] && Hash[hash["orderings"].to_a.map! {|k,v| [k, v.downcase.to_sym] }],
1564
+ hash["partial"],
1565
+ )
1566
+ obj
1567
+ end
1568
+ end
1569
+
1570
+ class << TopNRowNumberNode =
1571
+ Base.new(:id, :source, :partition_by, :order_by, :orderings, :row_number_symbol, :max_row_count_per_partition, :partial, :hash_symbol)
1572
+ def decode(hash)
1573
+ unless hash.is_a?(Hash)
1574
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1575
+ end
1576
+ obj = allocate
1577
+ obj.send(:initialize_struct,
1578
+ hash["id"],
1579
+ hash["source"] && PlanNode.decode(hash["source"]),
1580
+ hash["partitionBy"],
1581
+ hash["orderBy"],
1582
+ hash["orderings"] && Hash[hash["orderings"].to_a.map! {|k,v| [k, v.downcase.to_sym] }],
1583
+ hash["rowNumberSymbol"],
1584
+ hash["maxRowCountPerPartition"],
1585
+ hash["partial"],
1586
+ hash["hashSymbol"],
1587
+ )
1588
+ obj
1589
+ end
1590
+ end
1591
+
1592
+ class << TypeVariableConstraint =
1593
+ Base.new(:name, :comparable_required, :orderable_required, :variadic_bound)
1594
+ def decode(hash)
1595
+ unless hash.is_a?(Hash)
1596
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1597
+ end
1598
+ obj = allocate
1599
+ obj.send(:initialize_struct,
1600
+ hash["name"],
1601
+ hash["comparableRequired"],
1602
+ hash["orderableRequired"],
1603
+ hash["variadicBound"],
1604
+ )
1605
+ obj
1606
+ end
1607
+ end
1608
+
1609
+ class << UnionNode =
1610
+ Base.new(:id, :sources, :output_to_inputs, :outputs)
1611
+ def decode(hash)
1612
+ unless hash.is_a?(Hash)
1613
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1614
+ end
1615
+ obj = allocate
1616
+ obj.send(:initialize_struct,
1617
+ hash["id"],
1618
+ hash["sources"] && hash["sources"].map {|h| PlanNode.decode(h) },
1619
+ hash["outputToInputs"],
1620
+ hash["outputs"],
1621
+ )
1622
+ obj
1623
+ end
1624
+ end
1625
+
1626
+ class << UnnestNode =
1627
+ Base.new(:id, :source, :replicate_symbols, :unnest_symbols, :ordinality_symbol)
1628
+ def decode(hash)
1629
+ unless hash.is_a?(Hash)
1630
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1631
+ end
1632
+ obj = allocate
1633
+ obj.send(:initialize_struct,
1634
+ hash["id"],
1635
+ hash["source"] && PlanNode.decode(hash["source"]),
1636
+ hash["replicateSymbols"],
1637
+ hash["unnestSymbols"],
1638
+ hash["ordinalitySymbol"],
1639
+ )
1640
+ obj
1641
+ end
1642
+ end
1643
+
1644
+ class << ValuesNode =
1645
+ Base.new(:id, :output_symbols, :rows)
1646
+ def decode(hash)
1647
+ unless hash.is_a?(Hash)
1648
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1649
+ end
1650
+ obj = allocate
1651
+ obj.send(:initialize_struct,
1652
+ hash["id"],
1653
+ hash["outputSymbols"],
1654
+ hash["rows"],
1655
+ )
1656
+ obj
1657
+ end
1658
+ end
1659
+
1660
+ class << WindowNode =
1661
+ Base.new(:id, :source, :specification, :window_functions, :signatures, :hash_symbol, :pre_partitioned_inputs, :pre_sorted_order_prefix)
1662
+ def decode(hash)
1663
+ unless hash.is_a?(Hash)
1664
+ raise TypeError, "Can't convert #{hash.class} to Hash"
1665
+ end
1666
+ obj = allocate
1667
+ obj.send(:initialize_struct,
1668
+ hash["id"],
1669
+ hash["source"] && PlanNode.decode(hash["source"]),
1670
+ hash["specification"] && Specification.decode(hash["specification"]),
1671
+ hash["windowFunctions"],
1672
+ hash["signatures"] && Hash[hash["signatures"].to_a.map! {|k,v| [k, Signature.decode(v)] }],
1673
+ hash["hashSymbol"],
1674
+ hash["prePartitionedInputs"],
1675
+ hash["preSortedOrderPrefix"],
1676
+ )
1677
+ obj
1678
+ end
1679
+ end
1680
+
1681
+
1682
+ end
1683
+ end
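
Each generated class above also carries a class-level decode method that maps the camelCase keys of Trino's JSON responses onto the snake_case struct members, recursing into nested models such as ClientColumn and StatementStats. Below is a minimal sketch of decoding a statement response with this model version; it assumes the gem and its dependencies are installed, the require path for the versioned model file follows the gem's standard lib/ layout, and the JSON fragment is illustrative rather than a captured server response.

require 'json'
require 'trino-client'                           # defines the Trino::Client namespace
require 'trino/client/model_versions/0.149'      # loads the V0_149 models shown above

# Illustrative payload shaped like a Trino statement response.
json = <<~JSON
  {
    "id": "20240101_000000_00000_abcde",
    "infoUri": "http://coordinator:8080/query.html?20240101_000000_00000_abcde",
    "columns": [
      {"name": "cnt", "type": "bigint", "typeSignature": {"rawType": "bigint"}}
    ],
    "data": [[42]],
    "stats": {"state": "FINISHED", "processedRows": 1, "processedBytes": 9}
  }
JSON

models  = Trino::Client::ModelVersions::V0_149
results = models::QueryResults.decode(JSON.parse(json))

results.id                     # => "20240101_000000_00000_abcde"
results.columns.first.name     # => "cnt"  (a ClientColumn)
results.stats.processed_rows   # => 1      (a StatementStats)
results.error                  # => nil    (no QueryError in this payload)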