trino-client 1.0.2 → 2.0.1
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- checksums.yaml +4 -4
- data/ChangeLog.md +43 -0
- data/README.md +1 -1
- data/SECURITY.md +7 -0
- data/lib/trino/client/faraday_client.rb +12 -4
- data/lib/trino/client/query.rb +5 -3
- data/lib/trino/client/statement_client.rb +4 -2
- data/lib/trino/client/version.rb +1 -1
- data/lib/trino/client.rb +2 -4
- data/lib/trino-client.rb +1 -1
- data/trino-client.gemspec +16 -14
- metadata +76 -59
- data/.github/CODEOWNERS +0 -1
- data/.github/PULL_REQUEST_TEMPLATE.md +0 -18
- data/.github/workflows/ruby.yml +0 -30
- data/.gitignore +0 -4
- data/Gemfile +0 -6
- data/Rakefile +0 -45
- data/modelgen/model_versions.rb +0 -280
- data/modelgen/modelgen.rb +0 -119
- data/modelgen/models.rb +0 -31
- data/modelgen/trino_models.rb +0 -270
- data/publish.rb +0 -14
- data/release.rb +0 -56
- data/spec/basic_query_spec.rb +0 -82
- data/spec/client_spec.rb +0 -75
- data/spec/gzip_spec.rb +0 -40
- data/spec/model_spec.rb +0 -35
- data/spec/spec_helper.rb +0 -42
- data/spec/statement_client_spec.rb +0 -637
- data/spec/tpch/q01.sql +0 -21
- data/spec/tpch/q02.sql +0 -43
- data/spec/tpch_query_spec.rb +0 -41
- data/trino-client-ruby/lib/trino-client-ruby.rb +0 -1
- data/trino-client-ruby/trino-client-ruby.gemspec +0 -20
data/modelgen/model_versions.rb
DELETED
@@ -1,280 +0,0 @@
-#
-# Trino client for Ruby
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-module Trino::Client::ModelVersions
-
-  ####
-  ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command.
-  ## You should not edit this file directly. To modify the class definitions, edit
-  ## modelgen/model_versions.rb file and run "rake modelgen:all".
-  ##
-
-  module V<%= @model_version.gsub(".", "_") %>
-    class Base < Struct
-      class << self
-        alias_method :new_struct, :new
-
-        def new(*args)
-          new_struct(*args) do
-            # make it immutable
-            undef_method :"[]="
-            members.each do |m|
-              undef_method :"#{m}="
-            end
-
-            # replace constructor to receive hash instead of array
-            alias_method :initialize_struct, :initialize
-
-            def initialize(params={})
-              initialize_struct(*members.map {|m| params[m] })
-            end
-          end
-        end
-      end
-    end
-
-    class StageId < String
-      def initialize(str)
-        super
-        splitted = split('.', 2)
-        @query_id = splitted[0]
-        @id = splitted[1]
-      end
-
-      attr_reader :query_id, :id
-    end
-
-    class TaskId < String
-      def initialize(str)
-        super
-        splitted = split('.', 3)
-        @stage_id = StageId.new("#{splitted[0]}.#{splitted[1]}")
-        @query_id = @stage_id.query_id
-        @id = splitted[2]
-      end
-
-      attr_reader :query_id, :stage_id, :id
-    end
-
-    class Lifespan < String
-      def initialize(str)
-        super
-        if str == "TaskWide"
-          @grouped = false
-          @group_id = 0
-        else
-          # Group1
-          @grouped = true
-          @group_id = str[5..-1].to_i
-        end
-      end
-
-      attr_reader :grouped, :group_id
-    end
-
-    class ConnectorSession < Hash
-      def initialize(hash)
-        super()
-        merge!(hash)
-      end
-    end
-
-    module PlanNode
-      def self.decode(hash)
-        unless hash.is_a?(Hash)
-          raise TypeError, "Can't convert #{hash.class} to Hash"
-        end
-        model_class = case hash["@type"]
-          when "output" then OutputNode
-          when "project" then ProjectNode
-          when "tablescan" then TableScanNode
-          when "values" then ValuesNode
-          when "aggregation" then AggregationNode
-          when "markDistinct" then MarkDistinctNode
-          when "filter" then FilterNode
-          when "window" then WindowNode
-          when "rowNumber" then RowNumberNode
-          when "topnRowNumber" then TopNRowNumberNode
-          when "limit" then LimitNode
-          when "distinctlimit" then DistinctLimitNode
-          when "topn" then TopNNode
-          when "sample" then SampleNode
-          when "sort" then SortNode
-          when "remoteSource" then RemoteSourceNode
-          when "join" then JoinNode
-          when "semijoin" then SemiJoinNode
-          when "spatialjoin" then SpatialJoinNode
-          when "indexjoin" then IndexJoinNode
-          when "indexsource" then IndexSourceNode
-          when "tablewriter" then TableWriterNode
-          when "delete" then DeleteNode
-          when "metadatadelete" then MetadataDeleteNode
-          when "tablecommit" then TableFinishNode
-          when "unnest" then UnnestNode
-          when "exchange" then ExchangeNode
-          when "union" then UnionNode
-          when "intersect" then IntersectNode
-          when "scalar" then EnforceSingleRowNode
-          when "groupid" then GroupIdNode
-          when "explainAnalyze" then ExplainAnalyzeNode
-          when "apply" then ApplyNode
-          when "assignUniqueId" then AssignUniqueId
-          when "correlatedJoin" then CorrelatedJoinNode
-          when "statisticsWriterNode" then StatisticsWriterNode
-        end
-        if model_class
-          node = model_class.decode(hash)
-          class << node
-            attr_accessor :plan_node_type
-          end
-          node.plan_node_type = hash['@type']
-          node
-        end
-      end
-    end
-
-    # io.airlift.stats.Distribution.DistributionSnapshot
-    class << DistributionSnapshot =
-        Base.new(:max_error, :count, :total, :p01, :p05, :p10, :p25, :p50, :p75, :p90, :p95, :p99, :min, :max)
-      def decode(hash)
-        unless hash.is_a?(Hash)
-          raise TypeError, "Can't convert #{hash.class} to Hash"
-        end
-        obj = allocate
-        obj.send(:initialize_struct,
-          hash["maxError"],
-          hash["count"],
-          hash["total"],
-          hash["p01"],
-          hash["p05"],
-          hash["p10"],
-          hash["p25"],
-          hash["p50"],
-          hash["p75"],
-          hash["p90"],
-          hash["p95"],
-          hash["p99"],
-          hash["min"],
-          hash["max"],
-        )
-        obj
-      end
-    end
-
-    # This is a hybrid of JoinNode.EquiJoinClause and IndexJoinNode.EquiJoinClause
-    class << EquiJoinClause =
-        Base.new(:left, :right, :probe, :index)
-      def decode(hash)
-        unless hash.is_a?(Hash)
-          raise TypeError, "Can't convert #{hash.class} to Hash"
-        end
-        obj = allocate
-        obj.send(:initialize_struct,
-          hash["left"],
-          hash["right"],
-          hash["probe"],
-          hash["index"],
-        )
-        obj
-      end
-    end
-
-    class << WriterTarget =
-        Base.new(:type, :handle)
-      def decode(hash)
-        unless hash.is_a?(Hash)
-          raise TypeError, "Can't convert #{hash.class} to Hash"
-        end
-        obj = allocate
-        model_class = case hash["@type"]
-          when "CreateTarget" then CreateTarget
-          when "InsertTarget" then InsertTarget
-          when "DeleteTarget" then DeleteTarget
-        end
-        if model_class
-          model_class.decode(hash)
-        end
-      end
-    end
-
-    class << WriteStatisticsTarget =
-        Base.new(:type, :handle)
-      def decode(hash)
-        unless hash.is_a?(Hash)
-          raise TypeError, "Can't convert #{hash.class} to Hash"
-        end
-        obj = allocate
-        model_class = case hash["@type"]
-          when "WriteStatisticsHandle" then WriteStatisticsHandle
-        end
-        if model_class
-          model_class.decode(hash)
-        end
-      end
-    end
-
-    # Inner classes
-    module OperatorInfo
-      def self.decode(hash)
-        unless hash.is_a?(Hash)
-          raise TypeError, "Can't convert #{hash.class} to Hash"
-        end
-        model_class = case hash["@type"]
-          when "exchangeClientStatus" then ExchangeClientStatus
-          when "localExchangeBuffer" then LocalExchangeBufferInfo
-          when "tableFinish" then TableFinishInfo
-          when "splitOperator" then SplitOperatorInfo
-          when "hashCollisionsInfo" then HashCollisionsInfo
-          when "partitionedOutput" then PartitionedOutputInfo
-          when "joinOperatorInfo" then JoinOperatorInfo
-          when "windowInfo" then WindowInfo
-          when "tableWriter" then TableWriterInfo
-        end
-        if model_class
-          model_class.decode(hash)
-        end
-      end
-    end
-
-    class << HashCollisionsInfo =
-        Base.new(:weighted_hash_collisions, :weighted_sum_squared_hash_collisions, :weighted_expectedHash_collisions)
-      def decode(hash)
-        unless hash.is_a?(Hash)
-          raise TypeError, "Can't convert #{hash.class} to Hash"
-        end
-        obj = allocate
-        obj.send(:initialize_struct,
-          hash["weighted_hash_collisions"],
-          hash["weighted_sum_squared_hash_collisions"],
-          hash["weighted_expectedHash_collisions"]
-        )
-        obj
-      end
-    end
-
-    class ResourceGroupId < Array
-      def initialize(array)
-        super()
-        concat(array)
-      end
-    end
-
-    ##
-    # Those model classes are automatically generated
-    #
-
-<%= @contents %>
-  end
-end
data/modelgen/modelgen.rb
DELETED
@@ -1,119 +0,0 @@
-
-if ARGV.length != 4
-  puts "usage: <model-version> <trino-source-dir> <template.erb> <output.rb>"
-  exit 1
-end
-
-model_version, source_dir, template_path, output_path = *ARGV
-
-require_relative 'trino_models'
-
-require 'erb'
-erb = ERB.new(File.read(template_path))
-
-source_path = source_dir
-
-predefined_simple_classes = %w[StageId TaskId Lifespan ConnectorSession ResourceGroupId]
-predefined_models = %w[DistributionSnapshot PlanNode EquiJoinClause WriterTarget WriteStatisticsTarget OperatorInfo HashCollisionsInfo]
-
-assume_primitive = %w[Object Type Long Symbol QueryId PlanNodeId PlanFragmentId MemoryPoolId TransactionId URI Duration DataSize DateTime ColumnHandle ConnectorTableHandle ConnectorOutputTableHandle ConnectorIndexHandle ConnectorColumnHandle ConnectorInsertTableHandle ConnectorTableLayoutHandle Expression FunctionCall TimeZoneKey Locale TypeSignature Frame TupleDomain<ColumnHandle> SerializableNativeValue ConnectorTransactionHandle OutputBufferId ConnectorPartitioningHandle NullableValue ConnectorId HostAddress JsonNode Node CatalogName QualifiedObjectName FunctionId DynamicFilterId Instant]
-enum_types = %w[QueryState StageState TaskState QueueState PlanDistribution OutputPartitioning Step SortOrder BufferState NullPartitioning BlockedReason ParameterKind FunctionKind PartitionFunctionHandle Scope ErrorType DistributionType PipelineExecutionStrategy JoinType ExchangeNode.Type ColumnStatisticType TableStatisticType StageExecutionStrategy SemanticErrorCode QueryType]
-
-root_models = %w[QueryResults QueryInfo BasicQueryInfo] + %w[
-  OutputNode
-  ProjectNode
-  TableScanNode
-  ValuesNode
-  AggregationNode
-  MarkDistinctNode
-  FilterNode
-  WindowNode
-  RowNumberNode
-  TopNRowNumberNode
-  LimitNode
-  DistinctLimitNode
-  TopNNode
-  SampleNode
-  SortNode
-  RemoteSourceNode
-  JoinNode
-  SemiJoinNode
-  SpatialJoinNode
-  IndexJoinNode
-  IndexSourceNode
-  TableWriterNode
-  DeleteNode
-  TableFinishNode
-  UnnestNode
-  ExchangeNode
-  UnionNode
-  IntersectNode
-  EnforceSingleRowNode
-  GroupIdNode
-  ExplainAnalyzeNode
-  ApplyNode
-  AssignUniqueId
-  CorrelatedJoinNode
-  StatisticsWriterNode
-] + %w[
-  ExchangeClientStatus
-  LocalExchangeBufferInfo
-  TableFinishInfo
-  SplitOperatorInfo
-  PartitionedOutputInfo
-  JoinOperatorInfo
-  WindowInfo
-  TableWriterInfo
-]
-
-name_mapping = Hash[*%w[
-  StatementStats StageStats ClientStageStats
-  ClientStageStats StageStats ClientStageStats
-  QueryResults Column ClientColumn
-].each_slice(3).map { |x, y, z| [[x,y], z] }.flatten(1)]
-
-path_mapping = Hash[*%w[
-  ClientColumn client/trino-client/src/main/java/io/trino/client/Column.java
-  ClientStageStats client/trino-client/src/main/java/io/trino/client/StageStats.java
-  Column core/trino-main/src/main/java/io/trino/execution/Column.java
-  QueryStats core/trino-main/src/main/java/io/trino/execution/QueryStats.java
-  StageStats core/trino-main/src/main/java/io/trino/execution/StageStats.java
-  PartitionedOutputInfo core/trino-main/src/main/java/io/trino/operator/PartitionedOutputOperator.java
-  TableWriterInfo core/trino-main/src/main/java/io/trino/operator/TableWriterOperator.java
-  TableInfo core/trino-main/src/main/java/io/trino/execution/TableInfo.java
-  DynamicFiltersStats core/trino-main/src/main/java/io/trino/server/DynamicFilterService.java
-].map.with_index { |v,i| i % 2 == 0 ? v : (source_path + "/" + v) }]
-
-# model => [ [key,nullable,type], ... ]
-extra_fields = {
-  'QueryInfo' => [['finalQueryInfo', nil, 'boolean']]
-}
-
-analyzer = TrinoModels::ModelAnalyzer.new(
-  source_path,
-  skip_models: predefined_models + predefined_simple_classes + assume_primitive + enum_types,
-  path_mapping: path_mapping,
-  name_mapping: name_mapping,
-  extra_fields: extra_fields
-)
-analyzer.analyze(root_models)
-models = analyzer.models
-skipped_models = analyzer.skipped_models
-
-formatter = TrinoModels::ModelFormatter.new(
-  base_indent_count: 2,
-  struct_class: "Base",
-  special_struct_initialize_method: "initialize_struct",
-  primitive_types: assume_primitive,
-  skip_types: skipped_models,
-  simple_classes: predefined_simple_classes,
-  enum_types: enum_types,
-)
-formatter.format(models)
-
-@contents = formatter.contents
-@model_version = model_version
-
-data = erb.result
-File.write(output_path, data)
-
data/modelgen/models.rb
DELETED
@@ -1,31 +0,0 @@
-#
-# Trino client for Ruby
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-module Trino::Client
-
-  ####
-  ## lib/trino/client/models.rb is automatically generated using "rake modelgen:latest" command.
-  ## You should not edit this file directly. To modify the class definitions, edit
-  ## modelgen/models.rb file and run "rake modelgen:latest".
-  ##
-
-  module ModelVersions
-  end
-<% @versions.each do |ver| %>
-  require 'trino/client/model_versions/<%= ver %>.rb'<% end %>
-
-  Models = ModelVersions::V<%= @latest_version.gsub(".", "_") %>
-
-end
data/modelgen/trino_models.rb
DELETED
@@ -1,270 +0,0 @@
-
-module TrinoModels
-  require 'find'
-  require 'stringio'
-
-  PRIMITIVE_TYPES = %w[String boolean long int short byte double float Integer Double Boolean]
-  ARRAY_PRIMITIVE_TYPES = PRIMITIVE_TYPES.map { |t| "#{t}[]" }
-
-  class Model < Struct.new(:name, :fields)
-  end
-
-  class Field < Struct.new(:key, :nullable, :array, :map, :type, :base_type, :map_value_base_type, :base_type_alias)
-    alias_method :nullable?, :nullable
-    alias_method :array?, :array
-    alias_method :map?, :map
-
-    def name
-      @name ||= key.gsub(/[A-Z]/) {|f| "_#{f.downcase}" }
-    end
-  end
-
-  class ModelAnalysisError < StandardError
-  end
-
-  class ModelAnalyzer
-    def initialize(source_path, options={})
-      @source_path = source_path
-      @ignore_types = PRIMITIVE_TYPES + ARRAY_PRIMITIVE_TYPES + (options[:skip_models] || [])
-      @path_mapping = options[:path_mapping] || {}
-      @name_mapping = options[:name_mapping] || {}
-      @extra_fields = options[:extra_fields] || {}
-      @models = {}
-      @skipped_models = []
-    end
-
-    attr_reader :skipped_models
-
-    def models
-      @models.values.sort_by {|model| model.name }
-    end
-
-    def analyze(root_models)
-      root_models.each {|model_name|
-        analyze_model(model_name)
-      }
-    end
-
-    private
-
-    PROPERTY_PATTERN = /@JsonProperty\(\"(\w+)\"\)\s+(@Nullable\s+)?([\w\<\>\[\]\,\s\.]+)\s+\w+/
-    CREATOR_PATTERN = /@JsonCreator[\s]+public[\s]+(static\s+)?(\w+)[\w\s]*\((?:\s*#{PROPERTY_PATTERN}\s*,?)+\)/
-    GENERIC_PATTERN = /(\w+)\<(\w+)\>/
-
-    def analyze_fields(model_name, creator_block, generic: nil)
-      model_name = "#{model_name}_#{generic}" if generic
-      extra = @extra_fields[model_name] || []
-      fields = creator_block.scan(PROPERTY_PATTERN).concat(extra).map do |key,nullable,type|
-        map = false
-        array = false
-        nullable = !!nullable
-        if m = /(?:List|Set)<(\w+)>/.match(type)
-          base_type = m[1]
-          array = true
-        elsif m = /(?:Map|ListMultimap)<(\w+),\s*(\w+)>/.match(type)
-          base_type = m[1]
-          map_value_base_type = m[2]
-          map = true
-        elsif m = /Optional<([\w\[\]\<\>]+)>/.match(type)
-          base_type = m[1]
-          nullable = true
-        elsif m = /OptionalInt/.match(type)
-          base_type = 'Integer'
-          nullable = true
-        elsif m = /OptionalLong/.match(type)
-          base_type = 'Long'
-          nullable = true
-        elsif m = /OptionalDouble/.match(type)
-          base_type = 'Double'
-          nullable = true
-        elsif type =~ /\w+/
-          base_type = type
-        else
-          raise ModelAnalysisError, "Unsupported type #{type} in model #{model_name}"
-        end
-        base_type = @name_mapping[[model_name, base_type]] || base_type
-        map_value_base_type = @name_mapping[[model_name, map_value_base_type]] || map_value_base_type
-
-        if generic
-          base_type = generic if base_type == 'T'
-          map_value_base_type = generic if map_value_base_type == 'T'
-        end
-        if m = GENERIC_PATTERN.match(base_type)
-          base_type_alias = "#{m[1]}_#{m[2]}"
-        end
-
-        Field.new(key, !!nullable, array, map, type, base_type, map_value_base_type, base_type_alias)
-      end
-
-      @models[model_name] = Model.new(model_name, fields)
-      # recursive call
-      fields.each do |field|
-        analyze_model(field.base_type, model_name)
-        analyze_model(field.map_value_base_type, model_name) if field.map_value_base_type
-      end
-
-      return fields
-    end
-
-    def analyze_model(model_name, parent_model= nil, generic: nil)
-      return if @models[model_name] || @ignore_types.include?(model_name)
-
-      if m = GENERIC_PATTERN.match(model_name)
-        analyze_model(m[1], generic: m[2])
-        analyze_model(m[2])
-        return
-      end
-
-      path = find_class_file(model_name, parent_model)
-      java = File.read(path)
-
-      m = CREATOR_PATTERN.match(java)
-      unless m
-        raise ModelAnalysisError, "Can't find JsonCreator of a model class #{model_name} of #{parent_model} at #{path}"
-      end
-
-      body = m[0]
-      # check inner class first
-      while true
-        offset = m.end(0)
-        m = CREATOR_PATTERN.match(java, offset)
-        break unless m
-        inner_model_name = m[2]
-        next if @models[inner_model_name] || @ignore_types.include?(inner_model_name)
-        fields = analyze_fields(inner_model_name, m[0])
-      end
-
-      fields = analyze_fields(model_name, body, generic: generic)
-
-    rescue => e
-      puts "Skipping model #{parent_model}/#{model_name}: #{e}"
-      @skipped_models << model_name
-    end
-
-    def find_class_file(model_name, parent_model)
-      return @path_mapping[model_name] if @path_mapping.has_key? model_name
-
-      @source_files ||= Find.find(@source_path).to_a
-      pattern = /\/#{model_name}.java$/
-      matched = @source_files.find_all {|path| path =~ pattern && !path.include?('/test/') && !path.include?('/verifier/')}
-      if matched.empty?
-        raise ModelAnalysisError, "Model class #{model_name} is not found"
-      end
-      if matched.size == 1
-        return matched.first
-      else
-        raise ModelAnalysisError, "Model class #{model_name} of #{parent_model} found multiple match #{matched}"
-      end
-    end
-  end
-
-  class ModelFormatter
-    def initialize(options={})
-      @indent = options[:indent] || ' '
-      @base_indent_count = options[:base_indent_count] || 0
-      @struct_class = options[:struct_class] || 'Struct'
-      @special_struct_initialize_method = options[:special_struct_initialize_method]
-      @primitive_types = PRIMITIVE_TYPES + ARRAY_PRIMITIVE_TYPES + (options[:primitive_types] || [])
-      @skip_types = options[:skip_types] || []
-      @simple_classes = options[:simple_classes]
-      @enum_types = options[:enum_types]
-      @special_types = options[:special_types] || {}
-      @data = StringIO.new
-    end
-
-    def contents
-      @data.string
-    end
-
-    def format(models)
-      @models = models
-      models.each do |model|
-        @model = model
-
-        puts_with_indent 0, "class << #{model.name} ="
-        puts_with_indent 2, "#{@struct_class}.new(#{model.fields.map {|f| ":#{f.name}" }.join(', ')})"
-        format_decode
-        puts_with_indent 0, "end"
-        line
-      end
-    end
-
-    private
-
-    def line
-      @data.puts ""
-    end
-
-    def puts_with_indent(n, str)
-      @data.puts "#{@indent * (@base_indent_count + n)}#{str}"
-    end
-
-    def format_decode
-      puts_with_indent 1, "def decode(hash)"
-
-      puts_with_indent 2, "unless hash.is_a?(Hash)"
-      puts_with_indent 3, "raise TypeError, \"Can't convert \#{hash.class} to Hash\""
-      puts_with_indent 2, "end"
-
-      if @special_struct_initialize_method
-        puts_with_indent 2, "obj = allocate"
-        puts_with_indent 2, "obj.send(:#{@special_struct_initialize_method},"
-      else
-        puts_with_indent 2, "new("
-      end
-
-      @model.fields.each do |field|
-        next if @skip_types.include?(field.base_type) || @skip_types.include?(field.map_value_base_type)
-
-        if @primitive_types.include?(field.base_type) && !field.map?
-          expr = "hash[\"#{field.key}\"]"
-        else
-          expr = ""
-          expr << "hash[\"#{field.key}\"] && " #if field.nullable?
-
-          if field.map?
-            key_expr = convert_expression(field.base_type, field.base_type, "k")
-            value_expr = convert_expression(field.map_value_base_type, field.map_value_base_type, "v")
-            if key_expr == 'k' && value_expr == 'v'
-              expr = "hash[\"#{field.key}\"]"
-            else
-              expr << "Hash[hash[\"#{field.key}\"].to_a.map! {|k,v| [#{key_expr}, #{value_expr}] }]"
-            end
-          elsif field.array?
-            elem_expr = convert_expression(field.base_type, field.base_type, "h")
-            expr << "hash[\"#{field.key}\"].map {|h| #{elem_expr} }"
-          else
-            expr << convert_expression(field.type, field.base_type_alias || field.base_type, "hash[\"#{field.key}\"]")
-          end
-        end
-
-        #comment = "# #{field.base_type}#{field.array? ? '[]' : ''} #{field.key}"
-        #puts_with_indent 3, "#{expr}, #{comment}"
-        puts_with_indent 3, "#{expr},"
-      end
-
-      puts_with_indent 2, ")"
-
-      if @special_struct_initialize_method
-        puts_with_indent 2, "obj"
-      end
-
-      puts_with_indent 1, "end"
-    end
-
-    def convert_expression(type, base_type, key)
-      if @special_types[type]
-        special.call(key)
-      elsif @enum_types.include?(type) || @enum_types.include?(base_type)
-        "#{key}.downcase.to_sym"
-      elsif @primitive_types.include?(base_type)
-        key
-      elsif @simple_classes.include?(base_type)
-        "#{base_type}.new(#{key})"
-      else # model class
-        "#{base_type}.decode(#{key})"
-      end
-    end
-  end
-end
-