kinetic_sdk 5.0.21 → 5.0.22
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/lib/kinetic_sdk/core/lib/space.rb +148 -2
- data/lib/kinetic_sdk/task/lib/export.rb +4 -2
- data/lib/kinetic_sdk/task/lib/trees.rb +46 -50
- data/lib/kinetic_sdk/version.rb +1 -1
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-metadata.gz:
-data.tar.gz:
+metadata.gz: b17c766fc56a33bded34c6f5bdae883e79ffd9348ed9afd78fc6b43401f84640
+data.tar.gz: 3eb7e0ec3f5cd65e45d00528e950a2c16cd3acf7b53bb264c8edb2fd7ed41df1
 SHA512:
-metadata.gz:
-data.tar.gz:
+metadata.gz: 1a3b3db31c8cd917681b38a07c9fb37e6ee4339d7e772059c3452f6843284d74d28d9808ad726a05554d662f5e0ad469ab7caf8b9777b8b917cd9a11b5238f80
+data.tar.gz: b101040d5f1fc3fecdced915cecb4886b140d8eb71d5638132d6a016c047311a6d98e7383357f15bca53a323cdefe39354ca9502618bee274040d32a69533380
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,13 @@
 # Change Log

+## [5.0.22](https://github.com/kineticdata/kinetic-sdk-rb/tree/5.0.22) (2023-08-24)
+
+**Implemented enhancements:**
+
+- Kinetic Core integrated workflow import / export enhancements and bug fixes
+- Added export option flag to include Kinetic Core workflows when exporting trees
+- Skip Kinetic Core workflows when exporting trees unless explicitly included
+
 ## [5.0.21](https://github.com/kineticdata/kinetic-sdk-rb/tree/5.0.21) (2023-06-30)

 **Implemented enhancements:**
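The headline change in 5.0.22 is the new export option flag on the Task SDK. A minimal usage sketch follows; the `export` and `export_trees` signatures come from the diffs below, while the client constructor arguments and the `header_basic_auth` helper call are illustrative assumptions only:

    require "kinetic_sdk"

    # Hypothetical Task SDK client; these constructor arguments are assumptions.
    task_sdk = KineticSdk::Task.new(
      app_server_url: "https://task.example.com/kinetic-task",
      username: "admin",
      password: "secret",
      options: { export_directory: "/tmp/task-export" }
    )

    # Assumed basic-auth header helper; any valid headers hash works here.
    headers = task_sdk.header_basic_auth

    # Default behavior: trees linked to Kinetic Core workflows are skipped.
    task_sdk.export(headers)

    # Opt in to exporting Kinetic Core linked workflows as well.
    task_sdk.export(headers, { :include_workflows => true })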
data/lib/kinetic_sdk/core/lib/space.rb
CHANGED
@@ -85,11 +85,65 @@ module KineticSdk
 "space.webApis.{slug}",
 "space.webhooks.{name}",
 )
-core_data =
+core_data = find_space({ 'export' => true }, headers).content
 process_export(@options[:export_directory], export_shape, core_data)
+export_workflows(headers)
 @logger.info("Finished exporting space definition to #{@options[:export_directory]}.")
 end

+# Exports linked workflows for the space, kapps, and forms. This method is automatically called from `KineticSdk.Core.export_space()`.
+#
+# @param headers [Hash] hash of headers to send, default is basic authentication and accept JSON content type
+# @return nil
+def export_workflows(headers=default_headers)
+# Workflows were introduced in core v6
+version = app_version(headers).content["version"]
+if version && version["version"] < "6"
+@logger.info("Skip exporting workflows because the Core server version doesn't support workflows.")
+return
+end
+
+raise StandardError.new "An export directory must be defined to export workflows." if @options[:export_directory].nil?
+@logger.info("Exporting workflows to #{@options[:export_directory]}.")
+
+# space workflows
+space_workflows = find_space_workflows({ "include" => "details" }, headers).content["workflows"] || []
+space_workflows.select { |wf| !wf["event"].nil? }.each do |workflow|
+@logger.info(workflow) unless workflow["name"]
+evt = workflow["event"].slugify
+name = workflow["name"].slugify
+filename = "#{File.join(@options[:export_directory], "space", "workflows", evt, name)}.json"
+workflow_json = find_space_workflow(workflow["id"], {}, headers).content["treeJson"]
+write_object_to_file(filename, workflow_json)
+end
+
+space_content = find_space({ 'include' => "kapps.details,kapps.forms.details" }).content["space"]
+
+# kapp workflows
+space_content["kapps"].each do |kapp|
+kapp_workflows = find_kapp_workflows(kapp["slug"], {}, headers).content["workflows"] || []
+kapp_workflows.select { |wf| !wf["event"].nil? }.each do |workflow|
+evt = workflow["event"].slugify
+name = workflow["name"].slugify
+filename = "#{File.join(@options[:export_directory], "space", "kapps", kapp["slug"], "workflows", evt, name)}.json"
+workflow_json = find_kapp_workflow(kapp["slug"], workflow["id"], {}, headers).content["treeJson"]
+write_object_to_file(filename, workflow_json)
+end
+
+# form workflows
+kapp["forms"].each do |form|
+form_workflows = find_form_workflows(kapp["slug"], form["slug"], {}, headers).content["workflows"] || []
+form_workflows.select { |wf| !wf["event"].nil? }.each do |workflow|
+evt = workflow["event"].slugify
+name = workflow["name"].slugify
+filename = "#{File.join(@options[:export_directory], "space", "kapps", kapp["slug"], "forms", form["slug"], "workflows", evt, name)}.json"
+workflow_json = find_form_workflow(kapp["slug"], form["slug"], workflow["id"], {}, headers).content["treeJson"]
+write_object_to_file(filename, workflow_json)
+end
+end
+end
+end
+
 # Find the space
 #
 # @param params [Hash] Query parameters that are added to the URL, such as +include+
@@ -100,7 +154,8 @@ module KineticSdk
 get("#{@api_url}/space", params, headers)
 end

-# Imports a full space definition from the export_directory
+# Imports a full space definition from the export_directory, except for workflows. Those must be imported separately after
+# the Kinetic Platform source exists in task.
 #
 # @param slug [String] the slug of the space that is being imported
 # @param headers [Hash] hash of headers to send, default is basic authentication and accept JSON content type
@@ -146,6 +201,97 @@ module KineticSdk
 @logger.info("Finished importing space definition to #{@options[:export_directory]}.")
 end

+# Imports the workflows for the space. This method should be called after importing the Kinetic Platform source into task.
+#
+# @param slug [String] the slug of the space that is being imported
+# @param headers [Hash] hash of headers to send, default is basic authentication and accept JSON content type
+# @return nil
+def import_workflows(slug, headers=default_headers)
+# Workflows were introduced in core v6
+version = app_version(headers).content["version"]
+if version && version["version"] < "6"
+@logger.info("Skip importing workflows because the Core server version doesn't support workflows.")
+return
+end
+
+raise StandardError.new "An export directory must be defined to import space." if @options[:export_directory].nil?
+@logger.info("Importing workflows from #{@options[:export_directory]}.")
+
+# Map of existing workflows by space, kapp, form
+existing_workflows_cache = {}
+# Regular expressions to match workflow paths by space, kapp, or form
+form_re = /^\/kapps\/(?<kapp_slug>[a-z0-9]+(?:-[a-z0-9]+)*)\/forms\/(?<form_slug>[a-z0-9]+(?:-[a-z0-9]+)*)\/workflows/
+kapp_re = /^\/kapps\/(?<kapp_slug>[a-z0-9]+(?:-[a-z0-9]+)*)\/workflows/
+space_re = /^\/workflows/
+
+# Loop over all provided files sorting files before folders
+Dir["#{@options[:export_directory]}/space/**/workflows/**/*.json"].map { |file| [file.count("/"), file] }.sort.map { |file| file[1] }.each do |file|
+rel_path = file.sub("#{@options[:export_directory]}/", '')
+path_parts = File.dirname(rel_path).split(File::SEPARATOR)
+api_parts_path = path_parts[0..-1]
+api_path = "/#{api_parts_path.join("/").sub(/^space\//,'').sub(/\/[^\/]+$/,'')}"
+
+# populate the existing workflows for the workflowable object
+matches = form_re.match(api_path)
+# form workflows
+if matches
+form_slug = matches["form_slug"]
+kapp_slug = matches["kapp_slug"]
+map_key = self.space_slug + "|" + kapp_slug + "|" + form_slug
+if !existing_workflows_cache.has_key?(map_key)
+response = find_form_workflows(kapp_slug, form_slug, { "includes" => "details" }, headers)
+existing_workflows_cache[map_key] = response.content["workflows"]
+end
+else
+matches = kapp_re.match(api_path)
+# kapp workflows
+if matches
+kapp_slug = matches["kapp_slug"]
+map_key = self.space_slug + "|" + kapp_slug
+if !existing_workflows_cache.has_key?(map_key)
+response = find_kapp_workflows(kapp_slug, { "includes" => "details" }, headers)
+existing_workflows_cache[map_key] = response.content["workflows"]
+end
+else
+# space workflows
+map_key = self.space_slug
+if !existing_workflows_cache.has_key?(map_key)
+response = find_space_workflows({ "includes" => "details" }, headers)
+existing_workflows_cache[map_key] = response.content["workflows"]
+end
+end
+end
+
+tree_json = JSON.parse(File.read(file))
+event = path_parts.last.split("-").map { |part| part.capitalize }.join(" ")
+name = tree_json["name"]
+
+body = {
+"event" => event,
+"name" => name,
+"treeJson" => tree_json
+}
+
+# check if the workflow already exists
+existing_workflow = (existing_workflows_cache[map_key] || []).select { |wf|
+wf["event"] == event && wf["name"] == name
+}.first
+
+if existing_workflow
+workflow_id = existing_workflow["id"]
+url = "#{@api_url}#{api_path}/#{workflow_id}"
+@logger.info("Updating #{event} workflow #{workflow_id} from #{rel_path} to #{url}")
+resp = put(url, body, headers)
+@logger.warn("Failed to update workflow (#{resp.code}): #{resp.content}") unless resp.code == "200"
+else
+url = "#{@api_url}#{api_path}"
+@logger.info("Importing #{event} workflow from #{rel_path} to #{url}")
+resp = post(url, body, headers)
+@logger.warn("Failed to import workflow (#{resp.code}): #{resp.content}") unless resp.code == "200"
+end
+end
+end
+
 # Checks if the space exists
 #
 # @param slug [String] slug of the space
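Taken together, the new Core methods are meant to bracket a task import: `export_space` now pulls linked workflows automatically, and `import_workflows` is called on its own once the Kinetic Platform source exists in task. A rough sketch, assuming a configured `KineticSdk::Core` client (the constructor arguments and the exact `export_space`/`import_space` calls are illustrative, not part of this diff):

    core_sdk = KineticSdk::Core.new(
      space_server_url: "https://acme.example.com",
      space_slug: "acme",
      username: "admin",
      password: "secret",
      options: { export_directory: "/tmp/acme-export" }
    )

    # Export: linked workflows are written under space/**/workflows/**/*.json (Core v6+ only).
    core_sdk.export_space

    # Import: import_space intentionally skips workflows; bring them in after the
    # Kinetic Platform source has been created in task.
    core_sdk.import_space("acme")
    core_sdk.import_workflows("acme")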
data/lib/kinetic_sdk/task/lib/export.rb
CHANGED
@@ -15,10 +15,12 @@ module KineticSdk
 # * access keys
 #
 # @param headers [Hash] hash of headers to send, default is basic authentication
+# @param export_opts [Hash] hash of export options
+# - :include_workflows => true|false (default: false)
 # @return nil
-def export(headers=header_basic_auth)
+def export(headers=header_basic_auth, export_opts={})
 export_sources(headers)
-export_trees(nil,headers) # Includes routines when nil passed
+export_trees(nil,headers,export_opts) # Includes routines when nil passed
 export_handlers(headers)
 export_groups(headers)
 export_policy_rules(headers)
data/lib/kinetic_sdk/task/lib/trees.rb
CHANGED
@@ -213,92 +213,88 @@ module KineticSdk
 get("#{@api_url}/trees/guid/#{tree_id}", params, headers)
 end

-# Export a single tree or routine
+# Export a single tree or routine. This method will not export Kinetic Core
+# workflows unless `export_opts[:include_workflows] => true` export option
+# is provided.
 #
 # @param title [String] the title of the tree or routine
 # @param headers [Hash] hash of headers to send, default is basic authentication
+# @param export_opts [Hash] hash of export options
+# - :include_workflows => true|false (default: false)
 # @return nil
 #
-def export_tree(title, headers=header_basic_auth)
+def export_tree(title, headers=header_basic_auth, export_opts={})
 raise StandardError.new "An export directory must be defined to export a tree." if @options[:export_directory].nil?
 @logger.info("Exporting tree \"#{title}\" to #{@options[:export_directory]}.")
 # Get the tree
-response = find_tree(title, { "include" => "export" })
+response = find_tree(title, { "include" => "details,export" })
 # Parse the response and export the tree
 tree = response.content
+if export_opts[:include_workflows] || (!tree.has_key?("event") || tree["event"].nil?)
+# determine which directory to write the file to
+if tree['sourceGroup'] == "-"
+# Create the directory if it doesn't yet exist
+routine_dir = FileUtils::mkdir_p(File.join(@options[:export_directory], "routines"))
+tree_file = File.join(routine_dir, "#{tree['name'].slugify}.xml")
+else
+# Create the directory if it doesn't yet exist
+tree_dir = FileUtils::mkdir_p(File.join(@options[:export_directory],"sources", tree['sourceName'].slugify , "trees"))
+tree_file = File.join(tree_dir, "#{tree['sourceGroup'].slugify}.#{tree['name'].slugify}.xml")
+end

-
-
-
-
-
-
-
-
-
-
-
-# write the file
-server_version = server_info(headers).content["version"]
-if server_version > "04.03.0z"
-File.write(tree_file, tree['export'])
+# write the file
+server_version = server_info(headers).content["version"]
+if server_version > "04.03.0z"
+File.write(tree_file, tree['export'])
+else
+xml_doc = REXML::Document.new(tree["export"])
+xml_doc.context[:attribute_quote] = :quote
+xml_formatter = Prettier.new
+xml_formatter.write(xml_doc, File.open(tree_file, "w"))
+end
+@logger.info("Exported #{tree['type']}: #{tree['title']} to #{tree_file}")
 else
-
-xml_doc.context[:attribute_quote] = :quote
-xml_formatter = Prettier.new
-xml_formatter.write(xml_doc, File.open(tree_file, "w"))
+@logger.info("Did not export #{tree['type']}: #{tree['title']} because it is a Core linked workflow")
 end
-@logger.info("Exported #{tree['type']}: #{tree['title']} to #{tree_file}")
 end

-# Export
+# Export trees and local routines for a source, and global routines. This method will
+# not export Kinetic Core workflows unless `export_opts[:include_workflows] => true`
+# export option is provided.
 #
 # @param source_name [String] Name of the source to export trees and local routines
 # - Leave blank or pass nil to export all trees and global routines
 # - Pass "-" to export only global routines
 # @param headers [Hash] hash of headers to send, default is basic authentication
+# @param export_opts [Hash] hash of export options
+# - :include_workflows => true|false (default: false)
 # @return nil
-def export_trees(source_name=nil, headers=header_basic_auth)
+def export_trees(source_name=nil, headers=header_basic_auth, export_opts={})
 raise StandardError.new "An export directory must be defined to export trees." if @options[:export_directory].nil?
 if source_name.nil?
-
+if export_opts[:include_workflows]
+@logger.info("Exporting all trees, routines, and workflows to #{@options[:export_directory]}.")
+else
+@logger.info("Exporting all trees and routines to #{@options[:export_directory]}.")
+end
 export_routines(headers)
 (find_sources({}, headers).content["sourceRoots"] || []).each do |sourceRoot|
-export_trees(sourceRoot['name'])
+export_trees(sourceRoot['name'], headers, export_opts)
 end
 return
 elsif source_name == "-"
 @logger.info("Exporting global routines to #{@options[:export_directory]}.")
 else
-@logger.info("Exporting trees and routines for source \"#{source_name}\" to #{@options[:export_directory]}.")
+@logger.info("Exporting trees and local routines for source \"#{source_name}\" to #{@options[:export_directory]}.")
 end

 # Get all the trees and routines for the source
-response = find_trees({ "source" => source_name, "include" => "
+response = find_trees({ "source" => source_name, "include" => "details" }, headers)
 # Parse the response and export each tree
 (response.content["trees"] || []).each do |tree|
-
-
-# create the directory if it doesn't yet exist
-routine_dir = FileUtils::mkdir_p(File.join(@options[:export_directory], "routines"))
-tree_file = File.join(routine_dir, "#{tree['name'].slugify}.xml")
-else
-# create the directory if it doesn't yet exist
-tree_dir = FileUtils::mkdir_p(File.join(@options[:export_directory], "sources", source_name.slugify ,"trees"))
-tree_file = File.join(tree_dir, "#{tree['sourceGroup'].slugify}.#{tree['name'].slugify}.xml")
+if export_opts[:include_workflows] || (!tree.has_key?("event") || tree["event"].nil?)
+export_tree(tree['title'], headers, export_opts)
 end
-
-# write the file
-server_version = server_info(headers).content["version"]
-if server_version > "04.03.0z"
-File.write(tree_file, tree['export'])
-else
-xml_doc = REXML::Document.new(tree["export"])
-xml_doc.context[:attribute_quote] = :quote
-xml_formatter = Prettier.new
-xml_formatter.write(xml_doc, File.open(tree_file, "w"))
-end
-@logger.info("Exported #{tree['type']}: #{tree['title']} to #{tree_file}")
 end
 end

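The tree-level filter above keys off the tree's `event` attribute: a tree with a non-nil `event` is a Core linked workflow and is only written when the flag is set. A per-source sketch using the same illustrative `task_sdk` client and header helper as above (the source name and tree title are placeholders):

    headers = task_sdk.header_basic_auth

    # Trees and local routines for one source; Core linked workflows are skipped.
    task_sdk.export_trees("Kinetic Request CE", headers)

    # Include Core linked workflows for that source, or for a single tree by title.
    task_sdk.export_trees("Kinetic Request CE", headers, { :include_workflows => true })
    task_sdk.export_tree("Some Source :: Some Group :: Some Tree", headers, { :include_workflows => true })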
data/lib/kinetic_sdk/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: kinetic_sdk
 version: !ruby/object:Gem::Version
-version: 5.0.
+version: 5.0.22
 platform: ruby
 authors:
 - Kinetic Data
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-
+date: 2023-08-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 name: slugify
@@ -745,7 +745,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
 version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.3.3
 signing_key:
 specification_version: 4
 summary: Ruby SDK for Kinetic Data application APIs