xapixctl 1.2.1 → 1.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile.lock +1 -1
- data/lib/xapixctl/cli.rb +8 -0
- data/lib/xapixctl/connector_cli.rb +49 -0
- data/lib/xapixctl/phoenix_client/project_connection.rb +20 -0
- data/lib/xapixctl/titan_cli.rb +281 -0
- data/lib/xapixctl/version.rb +1 -1
- metadata +4 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 4ba3cb0a95f481dfd6568282c75d0bc4a2db36a03c4b8a177401deac61b04eaa
|
4
|
+
data.tar.gz: f5cd613fdcb2c9a204b224b6504348c4840fe45fd3506bea904bb26d5367793a
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: c0635abac3da2424e0109306eec1795dac8ae2cf9cb81aa4f8769ef397f5e70f4b5db7095b2820f0a49f7902d2d3e75df86b2128622fdaa10bb3082dea38ddab
|
7
|
+
data.tar.gz: f14855a202d934303acf771987686dcf395c0842a870a309283bbd080c1ebd32c0c1178e1a0df2256352c575920e4116010953a4e6d2a8833e178f27838f5e85
|
data/Gemfile.lock
CHANGED
data/lib/xapixctl/cli.rb
CHANGED
@@ -1,17 +1,25 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
3
|
require 'xapixctl/base_cli'
|
4
|
+
require 'xapixctl/connector_cli'
|
4
5
|
require 'xapixctl/preview_cli'
|
5
6
|
require 'xapixctl/sync_cli'
|
7
|
+
require 'xapixctl/titan_cli'
|
6
8
|
|
7
9
|
module Xapixctl
|
8
10
|
class Cli < BaseCli
|
11
|
+
desc "connectors SUBCOMMAND ...ARGS", "Commands for Connector resources"
|
12
|
+
subcommand "connectors", ConnectorCli
|
13
|
+
|
9
14
|
desc "preview SUBCOMMAND ...ARGS", "Request preview for resources"
|
10
15
|
subcommand "preview", PreviewCli
|
11
16
|
|
12
17
|
desc "sync SUBCOMMAND ...ARGS", "Sync resources"
|
13
18
|
subcommand "sync", SyncCli
|
14
19
|
|
20
|
+
desc "titan SUBCOMMAND ...ARGS", "Tools for ML model deployments and service generation", hide: true
|
21
|
+
subcommand "titan", TitanCli
|
22
|
+
|
15
23
|
option :format, aliases: "-f", default: 'text', enum: ['text', 'yaml', 'json'], desc: "Output format"
|
16
24
|
desc "get TYPE [ID]", "retrieve either all resources of given TYPE or just the resource of given TYPE and ID"
|
17
25
|
long_desc <<-LONGDESC
|
@@ -0,0 +1,49 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require 'xapixctl/base_cli'
|
4
|
+
|
5
|
+
module Xapixctl
|
6
|
+
class ConnectorCli < BaseCli
|
7
|
+
option :schema_import, desc: "Resource id of an existing Schema import"
|
8
|
+
desc "import SPECFILE", "Create HTTP Connectors from Swagger / OpenAPI or SOAP WSDL files"
|
9
|
+
long_desc <<-LONGDESC
|
10
|
+
`xapixctl connectors import SPECFILE` will create HTTP Connectors from the given Swagger / OpenAPI or SOAP WSDL file.
|
11
|
+
|
12
|
+
Examples:
|
13
|
+
\x5> $ xapixctl connectors import ./swagger.json -p xapix/some-project
|
14
|
+
\x5> $ xapixctl connectors import ./swagger.json -p xapix/some-project --schema-import=existing-schema
|
15
|
+
LONGDESC
|
16
|
+
def import(spec_filename)
|
17
|
+
path = Pathname.new(spec_filename)
|
18
|
+
unless path.file? && path.readable?
|
19
|
+
warn "Cannot read #{path}, please check file exists and is readable"
|
20
|
+
exit 1
|
21
|
+
end
|
22
|
+
if options[:schema_import]
|
23
|
+
puts "uploading to update schema import '#{options[:schema_import]}': #{spec_filename}..."
|
24
|
+
result = prj_connection.update_schema_import(options[:schema_import], spec_filename)
|
25
|
+
puts "updated #{result.dig('resource', 'kind')} #{result.dig('resource', 'id')}"
|
26
|
+
else
|
27
|
+
puts "uploading as new import: #{spec_filename}..."
|
28
|
+
result = prj_connection.add_schema_import(spec_filename)
|
29
|
+
puts "created #{result.dig('resource', 'kind')} #{result.dig('resource', 'id')}"
|
30
|
+
end
|
31
|
+
|
32
|
+
[['issues', 'import'], ['validation_issues', 'validation']].each do |key, name|
|
33
|
+
issues = result.dig('schema_import', 'report', key)
|
34
|
+
if issues.any?
|
35
|
+
puts "\n#{name} issues:"
|
36
|
+
issues.each { |issue| puts " - #{issue}" }
|
37
|
+
end
|
38
|
+
end
|
39
|
+
|
40
|
+
updated_resources = result.dig('schema_import', 'updated_resources')
|
41
|
+
if updated_resources.any?
|
42
|
+
puts "\nconnectors:"
|
43
|
+
updated_resources.each { |resource| puts " - #{resource['kind']} #{resource['id']}" }
|
44
|
+
else
|
45
|
+
puts "\nno connectors created/updated."
|
46
|
+
end
|
47
|
+
end
|
48
|
+
end
|
49
|
+
end
|
@@ -76,6 +76,18 @@ module Xapixctl
|
|
76
76
|
run { @client[data_source_preview_path(data_source_id)].post(preview_data.to_json, content_type: :json) }
|
77
77
|
end
|
78
78
|
|
79
|
+
def add_schema_import(spec_filename, &block)
|
80
|
+
spec_data = { schema_import: { file: File.new(spec_filename, 'r') } }
|
81
|
+
result_handler(block).
|
82
|
+
run { @client[schema_imports_path].post(spec_data) }
|
83
|
+
end
|
84
|
+
|
85
|
+
def update_schema_import(schema_import, spec_filename, &block)
|
86
|
+
spec_data = { schema_import: { file: File.new(spec_filename, 'r') } }
|
87
|
+
result_handler(block).
|
88
|
+
run { @client[schema_import_path(schema_import)].patch(spec_data) }
|
89
|
+
end
|
90
|
+
|
79
91
|
def pipeline_preview(pipeline_id, format: :hash, &block)
|
80
92
|
result_handler(block).
|
81
93
|
prepare_data(->(data) { data['pipeline_preview'] }).
|
@@ -128,6 +140,14 @@ module Xapixctl
|
|
128
140
|
"/projects/#{@org}/#{@project}/onboarding/data_sources/#{id}/preview"
|
129
141
|
end
|
130
142
|
|
143
|
+
def schema_imports_path
|
144
|
+
"/projects/#{@org}/#{@project}/onboarding/schema_imports"
|
145
|
+
end
|
146
|
+
|
147
|
+
def schema_import_path(schema_import)
|
148
|
+
"/projects/#{@org}/#{@project}/onboarding/schema_imports/#{schema_import}"
|
149
|
+
end
|
150
|
+
|
131
151
|
def resource_path(type, id)
|
132
152
|
"/projects/#{@org}/#{@project}/#{translate_type(type)}/#{id}"
|
133
153
|
end
|
@@ -0,0 +1,281 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require 'xapixctl/base_cli'
|
4
|
+
|
5
|
+
module Xapixctl
|
6
|
+
class TitanCli < BaseCli
|
7
|
+
DEFAULT_METHODS = {
|
8
|
+
"predict" => :predict,
|
9
|
+
"performance" => :performance,
|
10
|
+
}.freeze
|
11
|
+
|
12
|
+
option :schema_import, desc: "Resource id of an existing Schema import"
|
13
|
+
option :data, desc: "JSON encoded data for predict API", default: "[[1,2,3]]"
|
14
|
+
desc "service URL", "build a service for the deployed model"
|
15
|
+
long_desc <<-LONGDESC
|
16
|
+
`xapixctl titan service URL` will build a ML service around a deployed ML model.
|
17
|
+
|
18
|
+
We expect the following of the deployed ML model:
|
19
|
+
|
20
|
+
- There should be a "POST /predict" endpoint. Use --data="JSON" to specify an example dataset the model expects.
|
21
|
+
|
22
|
+
- If there is a "GET /performance" endpoint, it'll be made available. It's expected to return a JSON object with the properties 'accuracy', 'precision', 'recall', and 'min_acc_threshold'.
|
23
|
+
|
24
|
+
Examples:
|
25
|
+
\x5> $ xapixctl titan service https://services.demo.akoios.com/ai-model-name -p xapix/ml-project
|
26
|
+
LONGDESC
|
27
|
+
def service(akoios_url)
|
28
|
+
url = URI.parse(akoios_url)
|
29
|
+
schema = JSON.parse(RestClient.get(File.join(url.to_s, 'spec'), params: { host: url.hostname }, headers: { accept: :json }))
|
30
|
+
patch_schema(schema)
|
31
|
+
connector_refs = import_swagger(File.basename(url.path), schema)
|
32
|
+
say "\n== Onboarding Connectors", :bold
|
33
|
+
connectors = match_connectors_to_action(connector_refs)
|
34
|
+
say "\n== Building Service", :bold
|
35
|
+
service_doc = build_service(schema.dig('info', 'title'), connectors)
|
36
|
+
res = prj_connection.apply(service_doc)
|
37
|
+
say "\ncreated / updated service #{res.first}"
|
38
|
+
end
|
39
|
+
|
40
|
+
private
|
41
|
+
|
42
|
+
def patch_schema(schema)
|
43
|
+
predict_schema = schema.dig('paths', '/predict', 'post')
|
44
|
+
if predict_schema
|
45
|
+
predict_data = JSON.parse(options[:data]) rescue {}
|
46
|
+
predict_schema['operationId'] = 'predict'
|
47
|
+
predict_schema['parameters'].each do |param|
|
48
|
+
if param['name'] == 'json' && param['in'] == 'body'
|
49
|
+
param['schema']['properties'] = { "data" => extract_schema(predict_data) }
|
50
|
+
param['schema']['example'] = { "data" => predict_data }
|
51
|
+
end
|
52
|
+
end
|
53
|
+
end
|
54
|
+
|
55
|
+
performane_schema = schema.dig('paths', '/performance', 'get')
|
56
|
+
if performane_schema
|
57
|
+
performane_schema['operationId'] = 'performance'
|
58
|
+
end
|
59
|
+
end
|
60
|
+
|
61
|
+
def import_swagger(filename, schema)
|
62
|
+
Tempfile.create([filename, '.json']) do |f|
|
63
|
+
f.write(schema.to_json)
|
64
|
+
f.rewind
|
65
|
+
|
66
|
+
if options[:schema_import]
|
67
|
+
result = prj_connection.update_schema_import(options[:schema_import], f)
|
68
|
+
say "updated #{result.dig('resource', 'kind')} #{result.dig('resource', 'id')}"
|
69
|
+
else
|
70
|
+
result = prj_connection.add_schema_import(f)
|
71
|
+
say "created #{result.dig('resource', 'kind')} #{result.dig('resource', 'id')}"
|
72
|
+
end
|
73
|
+
result.dig('schema_import', 'updated_resources')
|
74
|
+
end
|
75
|
+
end
|
76
|
+
|
77
|
+
def match_connectors_to_action(connector_refs)
|
78
|
+
connector_refs.map do |connector_ref|
|
79
|
+
connector = prj_connection.resource(connector_ref['kind'], connector_ref['id'])
|
80
|
+
action = DEFAULT_METHODS[connector.dig('definition', 'name')]
|
81
|
+
say "\n#{connector['kind']} #{connector.dig('definition', 'name')} -> "
|
82
|
+
if action
|
83
|
+
say "#{action} action"
|
84
|
+
[action, update_connector_with_preview(connector)]
|
85
|
+
else
|
86
|
+
say "no action type detected, ignoring"
|
87
|
+
nil
|
88
|
+
end
|
89
|
+
end.compact
|
90
|
+
end
|
91
|
+
|
92
|
+
def update_connector_with_preview(connector)
|
93
|
+
say "fetching preview for #{connector['kind']} #{connector.dig('definition', 'name')}..."
|
94
|
+
preview_details = prj_connection.data_source_preview(connector.dig('metadata', 'id'))
|
95
|
+
preview = preview_details.dig('preview', 'sample')
|
96
|
+
say "got a #{preview['status']} response: #{preview['body']}"
|
97
|
+
if preview['status'] != 200
|
98
|
+
say "unexpected status, please check data or model"
|
99
|
+
elsif yes?("Does this look alright?", :bold)
|
100
|
+
res = prj_connection.accept_data_source_preview(connector.dig('metadata', 'id'))
|
101
|
+
return res.dig('data_source', 'resource_description')
|
102
|
+
end
|
103
|
+
connector
|
104
|
+
end
|
105
|
+
|
106
|
+
def extract_schema(data_sample)
|
107
|
+
case data_sample
|
108
|
+
when Array
|
109
|
+
{ type: 'array', items: extract_schema(data_sample[0]) }
|
110
|
+
when Hash
|
111
|
+
{ type: 'object', properties: data_sample.transform_values { |v| extract_schema(v) } }
|
112
|
+
when Numeric
|
113
|
+
{ type: 'number' }
|
114
|
+
else
|
115
|
+
{}
|
116
|
+
end
|
117
|
+
end
|
118
|
+
|
119
|
+
def build_service(title, connectors)
|
120
|
+
{
|
121
|
+
version: 'v1',
|
122
|
+
kind: 'Service',
|
123
|
+
metadata: { id: title.parameterize },
|
124
|
+
definition: {
|
125
|
+
name: title.humanize,
|
126
|
+
actions: connectors.map { |action, connector| build_service_action(action, connector) }
|
127
|
+
}
|
128
|
+
}
|
129
|
+
end
|
130
|
+
|
131
|
+
def build_service_action(action_type, connector)
|
132
|
+
{
|
133
|
+
name: action_type,
|
134
|
+
parameter_schema: parameter_schema(action_type, connector),
|
135
|
+
result_schema: result_schema(action_type),
|
136
|
+
pipeline: { units: pipeline_units(action_type, connector) }
|
137
|
+
}
|
138
|
+
end
|
139
|
+
|
140
|
+
def parameter_schema(action_type, connector)
|
141
|
+
case action_type
|
142
|
+
when :predict
|
143
|
+
{ type: 'object', properties: {
|
144
|
+
data: connector.dig('definition', 'parameter_schema', 'properties', 'body', 'properties', 'data'),
|
145
|
+
} }
|
146
|
+
when :performance
|
147
|
+
{ type: 'object', properties: {} }
|
148
|
+
end
|
149
|
+
end
|
150
|
+
|
151
|
+
def result_schema(action_type)
|
152
|
+
case action_type
|
153
|
+
when :predict
|
154
|
+
{ type: 'object', properties: {
|
155
|
+
prediction: { type: 'object', properties: { percent: { type: 'number' }, raw: { type: 'number' } } },
|
156
|
+
success: { type: 'boolean' },
|
157
|
+
error: { type: 'string' }
|
158
|
+
} }
|
159
|
+
when :performance
|
160
|
+
{ type: 'object', properties: {
|
161
|
+
performance: {
|
162
|
+
type: 'object', properties: {
|
163
|
+
accuracy: { type: 'number' },
|
164
|
+
precision: { type: 'number' },
|
165
|
+
recall: { type: 'number' },
|
166
|
+
min_acc_threshold: { type: 'number' }
|
167
|
+
}
|
168
|
+
}
|
169
|
+
} }
|
170
|
+
end
|
171
|
+
end
|
172
|
+
|
173
|
+
def pipeline_units(action_type, connector)
|
174
|
+
case action_type
|
175
|
+
when :predict
|
176
|
+
[entry_unit, predict_unit(connector), predict_result_unit]
|
177
|
+
when :performance
|
178
|
+
[entry_unit, performance_unit(connector), performance_result_unit]
|
179
|
+
end
|
180
|
+
end
|
181
|
+
|
182
|
+
def entry_unit
|
183
|
+
{ version: 'v2', kind: 'Unit/Entry',
|
184
|
+
metadata: { id: 'entry' },
|
185
|
+
definition: { name: 'Entry' } }
|
186
|
+
end
|
187
|
+
|
188
|
+
def predict_unit(connector)
|
189
|
+
leafs = extract_leafs(connector.dig('definition', 'parameter_schema', 'properties', 'body'))
|
190
|
+
{ version: 'v2', kind: 'Unit/DataSource',
|
191
|
+
metadata: { id: 'predict' },
|
192
|
+
definition: {
|
193
|
+
name: 'Predict',
|
194
|
+
inputs: ['entry'],
|
195
|
+
data_source: connector.dig('metadata', 'id'),
|
196
|
+
formulas: leafs.map { |leaf|
|
197
|
+
{ ref: leaf[:node]['$id'], formula: (['', 'entry'] + leaf[:path]).join('.') }
|
198
|
+
}
|
199
|
+
} }
|
200
|
+
end
|
201
|
+
|
202
|
+
def predict_result_unit
|
203
|
+
{ version: 'v2', kind: 'Unit/Result',
|
204
|
+
metadata: { id: 'result' },
|
205
|
+
definition: {
|
206
|
+
name: 'Result',
|
207
|
+
inputs: ['predict'],
|
208
|
+
formulas: [{
|
209
|
+
ref: "#prediction.raw",
|
210
|
+
formula: "IF(.predict.status = 200, ROUND(100*coerce.to-float(.predict.body), 2))"
|
211
|
+
}, {
|
212
|
+
ref: "#prediction.percent",
|
213
|
+
formula: "IF(.predict.status = 200, coerce.to-float(.predict.body))"
|
214
|
+
}, {
|
215
|
+
ref: "#success",
|
216
|
+
formula: ".predict.status = 200"
|
217
|
+
}, {
|
218
|
+
ref: "#error",
|
219
|
+
formula: "IF(.predict.status <> 200, 'Model not trained!')"
|
220
|
+
}],
|
221
|
+
parameter_sample: {
|
222
|
+
"prediction" => { "percent" => 51, "raw" => 0.5112131 },
|
223
|
+
"success" => true,
|
224
|
+
"error" => nil
|
225
|
+
}
|
226
|
+
} }
|
227
|
+
end
|
228
|
+
|
229
|
+
def performance_unit(connector)
|
230
|
+
{ version: 'v2', kind: 'Unit/DataSource',
|
231
|
+
metadata: { id: 'performance' },
|
232
|
+
definition: {
|
233
|
+
name: 'Performance',
|
234
|
+
inputs: ['entry'],
|
235
|
+
data_source: connector.dig('metadata', 'id')
|
236
|
+
} }
|
237
|
+
end
|
238
|
+
|
239
|
+
def performance_result_unit
|
240
|
+
{ version: 'v2', kind: 'Unit/Result',
|
241
|
+
metadata: { id: 'result' },
|
242
|
+
definition: {
|
243
|
+
name: 'Result',
|
244
|
+
inputs: ['performance'],
|
245
|
+
formulas: [{
|
246
|
+
ref: "#performance.recall",
|
247
|
+
formula: "WITH(data, JSON.DECODE(REGEXREPLACE(performance.body, \"'\", \"\\\"\")), .data.recall)"
|
248
|
+
}, {
|
249
|
+
ref: "#performance.accuracy",
|
250
|
+
formula: "WITH(data, JSON.DECODE(REGEXREPLACE(performance.body, \"'\", \"\\\"\")), .data.accuracy)"
|
251
|
+
}, {
|
252
|
+
ref: "#performance.precision",
|
253
|
+
formula: "WITH(data, JSON.DECODE(REGEXREPLACE(performance.body, \"'\", \"\\\"\")), .data.precision)"
|
254
|
+
}, {
|
255
|
+
ref: "#performance.min_acc_threshold",
|
256
|
+
formula: "WITH(data, JSON.DECODE(REGEXREPLACE(performance.body, \"'\", \"\\\"\")), .data.min_acc_threshold)"
|
257
|
+
}],
|
258
|
+
parameter_sample: {
|
259
|
+
"performance" => {
|
260
|
+
"recall" => 0.8184713375796179,
|
261
|
+
"accuracy" => 0.9807847896440129,
|
262
|
+
"precision" => 0.8711864406779661,
|
263
|
+
"min_acc_threshold" => 0.84,
|
264
|
+
}
|
265
|
+
}
|
266
|
+
} }
|
267
|
+
end
|
268
|
+
|
269
|
+
def extract_leafs(schema, current_path = [])
|
270
|
+
return unless schema
|
271
|
+
case schema['type']
|
272
|
+
when 'object'
|
273
|
+
schema['properties'].flat_map { |key, sub_schema| extract_leafs(sub_schema, current_path + [key]) }.compact
|
274
|
+
when 'array'
|
275
|
+
extract_leafs(schema['items'], current_path + [:[]])
|
276
|
+
else
|
277
|
+
{ path: current_path, node: schema }
|
278
|
+
end
|
279
|
+
end
|
280
|
+
end
|
281
|
+
end
|
data/lib/xapixctl/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: xapixctl
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 1.2.1
|
4
|
+
version: 1.2.2
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Michael Reinsch
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2021-03-
|
11
|
+
date: 2021-03-31 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: activesupport
|
@@ -206,6 +206,7 @@ files:
|
|
206
206
|
- lib/xapixctl.rb
|
207
207
|
- lib/xapixctl/base_cli.rb
|
208
208
|
- lib/xapixctl/cli.rb
|
209
|
+
- lib/xapixctl/connector_cli.rb
|
209
210
|
- lib/xapixctl/phoenix_client.rb
|
210
211
|
- lib/xapixctl/phoenix_client/connection.rb
|
211
212
|
- lib/xapixctl/phoenix_client/organization_connection.rb
|
@@ -213,6 +214,7 @@ files:
|
|
213
214
|
- lib/xapixctl/phoenix_client/result_handler.rb
|
214
215
|
- lib/xapixctl/preview_cli.rb
|
215
216
|
- lib/xapixctl/sync_cli.rb
|
217
|
+
- lib/xapixctl/titan_cli.rb
|
216
218
|
- lib/xapixctl/util.rb
|
217
219
|
- lib/xapixctl/version.rb
|
218
220
|
- xapixctl.gemspec
|