persistence-providers 0.0.3.5 → 0.0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 62b3d49ea7a138d6f1f9e99e0b8f776dcc0212ea24e8fba1bc35a2876f466951
- data.tar.gz: 4fecccfff86f73b5b9ab5f647294bb5b866b465bfcc2f59c1991722da5cc0b68
+ metadata.gz: 9bc555b9367087540ccddc49bc1ce3fa937b0a36be7b3d6ff832373a9beeadd0
+ data.tar.gz: 5ae84500a46b183544d53b1d7d2f90cf6b49bcf52d34d2b95eba8474f7653c93
  SHA512:
- metadata.gz: 7be9867552f0c0cc2cf66cdea3295b6c02c41909d92c645178244b256b4a67079f595ead4a48c91c33a116ba4323498001665ea9e089d68b37941c1618111a7e
- data.tar.gz: 7bc22d498ec484b7466853e5ca550554f6567769596417b41f086e515fb58ac20b62b2074638cebee00a7f0c5af396dce8f68684b098374407d673338d6749e7
+ metadata.gz: 52cc878735fdf04cb19b7a202ff2622872b4514f40c1b849a0ff73a821f4e8e333c1aa9a3ac6ba6b31de29b199f4c49fb5fbd3df2c2bdf64fd4d421f6ff67e7d
+ data.tar.gz: a9b7fbf25e02a5b1dc15220760e91ca1bc703e7f1c2f388f6f2b083c48170c1162ffc483464cbde99f31e2465dd687c78bc47006a5ad9ee6157c5c40538083e9
@@ -1,10 +1,16 @@
  require 'kubeclient'
  require 'celluloid/io'
- require 'singleton'
+ # require 'singleton'

  module DTK
  class CrdClient
- include Singleton
+ # include Singleton
+ DEFAULT_API_VERSION = 'v1alpha1'
+
+ # COMPONENT_DEF_CRD_VERSION = ENV["COMPONENT_DEF_CRD_VERSION"]
+ # ASSEMBLY_CRD_VERSION = ENV["ASSEMBLY_CRD_VERSION"]
+ # WORKFLOW_CRD_VERSION = ENV["WORKFLOW_CRD_VERSION"]
+ # WORKFLOW_INSTANCE_CRD_VERSION = ENV["WORKFLOW_INSTANCE_CRD_VERSION"]

  attr_accessor :kubeclient

@@ -12,17 +18,26 @@ module DTK
  if @kubeclient = opts[:kubeclient]
  @kubeclient
  else
- ::DTK::CrdClient.instance.kubeclient
+ kubeclient_version(opts)
  end
  end

- # opts can have keys
- # kubernetes_client - already instantiated kubernetes client
- def initialize(opts = {})
- if @kubeclient = opts[:kubernetes_client]
- return @kubeclient
+ def self.kubeclient_version(opts = {})
+ version = opts[:apiVersion] || DEFAULT_API_VERSION
+
+ if existing_version = KubeclientVersions[version]
+ return existing_version
+ else
+ new_instance = new(version).kubeclient
+ KubeclientVersions[version] = new_instance
+ new_instance
  end
+ end
+ KubeclientVersions = {}

+ # opts can have keys
+ # kubernetes_client - already instantiated kubernetes client
+ def initialize(apiVersion)
  ssl_options = {}
  auth_options = { bearer_token_file: '/var/run/secrets/kubernetes.io/serviceaccount/token' }

@@ -37,7 +52,7 @@ module DTK

  @kubeclient = Kubeclient::Client.new(
  'https://kubernetes.default.svc/apis/',
- 'dtk.io/v1alpha1',
+ "dtk.io/#{apiVersion}",
  auth_options: auth_options,
  ssl_options: ssl_options,
  socket_options: socket_options
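
With Singleton dropped, CrdClient now memoizes one Kubeclient per CRD API version in KubeclientVersions, keyed by opts[:apiVersion]. A minimal usage sketch, assuming the gem is already loaded; the version strings and opts below are illustrative, not part of the gem:

  default_client = DTK::CrdClient.get_kubeclient({})                      # builds and caches the 'v1alpha1' client
  versioned      = DTK::CrdClient.get_kubeclient(apiVersion: 'v1alpha2')  # hypothetical newer CRD version
  DTK::CrdClient.get_kubeclient(apiVersion: 'v1alpha2')                   # returns the cached instance
  DTK::CrdClient.get_kubeclient(kubeclient: default_client)               # an injected client bypasses the cache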
@@ -1,5 +1,6 @@
  module DTK
  module State
+ require_relative 'utils'
  require_relative 'crd_client'
  require_relative 'state/crd_assembly'
  require_relative 'state/component'
@@ -17,8 +17,8 @@ module DTK::State
  # task_id
  def self.get(crd_assembly_namespace, crd_assembly_name, component_name, opts = {})
  crd_assembly = CrdAssembly.get(crd_assembly_namespace, crd_assembly_name, opts)
- if matching_component = crd_assembly.components.find{ |cmp| cmp.to_hash.keys.first.to_s == component_name }
- Component.new(component_name, matching_component[component_name], crd_assembly, opts)
+ if matching_component = Component.find_matching_component(crd_assembly, component_name)
+ Component.new(component_name, Component.get_component_content(matching_component, component_name), crd_assembly, opts)
  end
  end

@@ -54,6 +54,20 @@ module DTK::State

  private

+ def self.get_component_content(matching_component, component_name)
+ return matching_component.is_a?(String) ? {} : matching_component[component_name]
+ end
+
+ def self.find_matching_component(assembly, component_name)
+ assembly.components.find do |cmp|
+ if cmp.is_a? String
+ cmp == component_name
+ else
+ cmp.to_hash.keys.first.to_s == component_name
+ end
+ end
+ end
+
  def get_component_def(opts = {})
  destructured_component = destructure_component_full_name
  component_def = @component_defs.find { |component_def| component_def[:name] == destructured_component[:component_def_name] }
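
find_matching_component and get_component_content let an assembly list components either as bare strings or as single-key hashes. A sketch of the two shapes; the component names and attribute values are hypothetical:

  # assembly.components => ["influxdb-server", { "grafana" => { "attributes" => { "port" => 3000 } } }]
  Component.find_matching_component(assembly, "influxdb-server")          # => "influxdb-server"
  Component.get_component_content("influxdb-server", "influxdb-server")   # => {} (string entries carry no content)
  grafana = Component.find_matching_component(assembly, "grafana")        # => the single-key hash
  Component.get_component_content(grafana, "grafana")                     # => { "attributes" => { "port" => 3000 } }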
@@ -3,6 +3,7 @@ module DTK::State
  class Influxdb < self
  require_relative('influxdb/client')
  require_relative('influxdb/measurement')
+ require_relative('influxdb/semantictype')

  attr_reader :client, :measurement

@@ -12,88 +13,51 @@ module DTK::State
  end

  def get(namespace, component_name, assembly_name, attribute_name, opts = {})
- required_tags = get_required_tags(namespace, component_name, assembly_name, attribute_name)
- if opts[:provider] == "correlation"
- errors = client.measurement_helper(:errors)
- required_tags.merge!({ correlator_type: opts[:entrypoint].split("/").last.split(".")[0] })
- errors.get_last_point(required_tags)
- elsif
- last_value = measurement.get_last_point(required_tags)
- last_value
- end
+ required_tags = measurement.get_required_tags(namespace, component_name, assembly_name, attribute_name)
+ required_tags.merge! measurement.get_correlator_type(opts[:entrypoint]) if opts[:provider] == "correlation"
+ measurement.get_last_point(required_tags)
+ rescue => e
+ raise "Error happened while getting attribute from InfluxDB.\nError: #{e}"
  end

  def write(namespace, component_name, assembly_name, attribute_name, value, opts = {}, timestamp = nil)
- if opts[:provider] == "correlation"
- errors = client.measurement_helper(:errors)
- required_tags = get_required_tags(namespace, component_name, assembly_name, attribute_name)
- required_tags.merge!({ correlator_type: opts[:entrypoint].split("/").last.split(".")[0] })
- errors.write(value.to_s, required_tags, timestamp)
- elsif
- required_tags = get_required_tags(namespace, component_name, assembly_name, attribute_name)
- measurement.write(value, required_tags, timestamp)
- end
+ required_tags = measurement.get_required_tags(namespace, component_name, assembly_name, attribute_name)
+ required_tags.merge! measurement.get_correlator_type(opts[:entrypoint]) if opts[:provider] == "correlation"
+ measurement.write(value.to_s, required_tags, timestamp)
+ rescue => e
+ raise "Error happened while writing attribute into InfluxDB.\Error: #{e}"
  end

  def write_event(event_id, pod_name, pod_namespace, event_source, event_message, component_name, attribute_name, task_id, timestamp)
- begin
- fail "Bad timestamp input, write operation wont be completed" if timestamp > Time.new
- value_to_write = { event_source: event_source, event_message: event_message }
- required_tags = {
- event_id: event_id,
- pod_name: pod_name,
- pod_namespace: pod_namespace,
- component_name: component_name,
- attribute_name: attribute_name,
- task_id: task_id
- }
- measurement.write(value_to_write.to_s, required_tags, timestamp)
- rescue => error
- puts error
- end
+ fail "Bad timestamp input, write operation wont be completed" if timestamp > Time.new
+ value_to_write = { event_source: event_source, event_message: event_message }
+ required_tags = measurement.get_required_tags(event_id, pod_name, pod_namespace, component_name, attribute_name, task_id)
+ measurement.write(value_to_write.to_s, required_tags, timestamp)
+ rescue => error
+ raise "Error happened while writing event into InfluxDB.\nError: #{e}"
  end

  def get_event(event_id, pod_name, pod_namespace, component_name, attribute_name, task_id)
- required_tags = {
- event_id: event_id,
- pod_name: pod_name,
- pod_namespace: pod_namespace,
- component_name: component_name,
- attribute_name: attribute_name,
- task_id: task_id
- }
+ required_tags = measurement.get_required_tags(event_id, pod_name, pod_namespace, component_name, attribute_name, task_id)
  last_point = measurement.get_last_point(required_tags)
+ rescue => e
+ raise "Error happened while getting event from InfluxDB.\nError: #{e}"
  end

  def write_state(type, name, namespace, object_state, spec, status, component_name, attribute_name, task_id, timestamp)
- begin
- fail "Bad timestamp input, write operation wont be completed" if timestamp > Time.new
- value_to_write = { spec: spec, status: status }
- required_tags = {
- type: type,
- name: name,
- namespace: namespace,
- object_state: object_state,
- component_name: component_name,
- attribute_name: attribute_name,
- task_id: task_id
- }
- measurement.write(value_to_write.to_s, required_tags, timestamp)
- rescue => error
- puts error
- end
+ raise "Bad timestamp input, write operation to InfluxDB wont be completed" if timestamp > Time.new
+ value_to_write = { spec: spec, status: status }
+ required_tags = measurement.get_required_tags(type, name, namespace, object_state, component_name, attribute_name, task_id)
+ measurement.write(value_to_write.to_s, required_tags, timestamp)
+ rescue => e
+ raise "Error happened while writing state into InfluxDB.\nError: #{e}"
  end
-
- private

- def get_required_tags(namespace, component_name, assembly_name, attribute_name)
- required_tags = {
- namespace: namespace,
- component_name: component_name,
- assembly_name: assembly_name,
- attribute_name: attribute_name,
- task_id: "1"
- }
+ def get_state(type, name, namespace, object_state, component_name, attribute_name, task_id)
+ required_tags = measurement.get_required_tags(type, name, namespace, object_state, component_name, attribute_name, task_id)
+ measurement.get_last_point(required_tags)
+ rescue => e
+ raise "Error happened while getting state from InfluxDB.\nError: #{e}"
  end
  end
  end
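
The provider no longer builds tag hashes or swallows errors itself: tags now come from the measurement object and every failure is re-raised with an InfluxDB-specific message. For reference, the tag hash the attribute measurement helper produces looks like this (values illustrative, not part of the gem):

  measurement.get_required_tags("dtk", "influxdb-server", "assembly1", "port")
  # => { namespace: "dtk", component_name: "influxdb-server",
  #      assembly_name: "assembly1", attribute_name: "port", task_id: "1" }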
@@ -12,24 +12,22 @@ module DTK::State
  def query(query_expression)
  query_api = self.connection.create_query_api
  query_api.query(query_expression)
+ rescue => e
+ raise "Failed while processing flux query!. Error: #{e}"
  end

  def write_point(data)
- begin
- write_api = self.connection.create_write_api
- write_api.write(data: data)
- rescue => error
- fail error
- end
+ write_api = self.connection.create_write_api
+ write_api.write(data: data)
+ rescue => e
+ raise e
  end

  def measurement_helper(measurement_name)
- begin
- klass = Measurement.const_get(measurement_name.to_sym.capitalize)
- klass.new(measurement_name, self)
- rescue => error
- puts error
- end
+ klass = Measurement.const_get(measurement_name.to_sym.capitalize)
+ klass.new(measurement_name, self)
+ rescue => e
+ raise e
  end

  attr_reader :connection_parameters, :connection
@@ -49,18 +47,21 @@ module DTK::State
  org: params[:org],
  bucket: params[:bucket]
  }
+ rescue => e
+ raise "Problem happened while processing InfluxDB connection parameters. Error: #{e}"
  end

  def return_connection(connection_parameters)
- begin
- InfluxDB2::Client.new(connection_parameters[:url], connection_parameters[:token],
- bucket: connection_parameters[:bucket],
- org: connection_parameters[:org],
- precision: InfluxDB2::WritePrecision::MILLISECOND,
- use_ssl: false)
- rescue => error
- fail "Error: #{error}"
- end
+ client = InfluxDB2::Client.new(connection_parameters[:url], connection_parameters[:token],
+ bucket: connection_parameters[:bucket],
+ org: connection_parameters[:org],
+ precision: InfluxDB2::WritePrecision::MILLISECOND,
+ use_ssl: false)
+ query_api = client.create_query_api
+ query_api.query(query: 'from(bucket:"' + connection_parameters[:bucket] + '") |> range(start: -5)')
+ client
+ rescue => e
+ raise "Connection with InfluxDB could not be established. #{e}"
  end
  end
  end
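
return_connection now issues a short Flux range query right after constructing the client, so a bad URL, token or bucket raises at connection time instead of on the first write. A sketch of the same probe pattern; the connection values are placeholders:

  require 'influxdb-client'

  client = InfluxDB2::Client.new("http://influxdb:8086", "example-token",
                                 bucket: "dtk", org: "dtk",
                                 precision: InfluxDB2::WritePrecision::MILLISECOND,
                                 use_ssl: false)
  client.create_query_api.query(query: 'from(bucket:"dtk") |> range(start: -5)')  # raises if unreachable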
@@ -20,14 +20,22 @@ module DTK::State
  end

  def get_last_point(params_hash = {})
- begin
- check_params_hash(params_hash)
- flux_query = 'from(bucket:"' + client.connection_parameters[:bucket] + '") |> range(start:-5) |> filter(fn: (r) => r._measurement == "' + name.to_s + '")' + flux_filter(params_hash) + ' |> last()' + '|> drop(columns: ["_start", "_stop", "_field", "_measurement", "attribute_name", "assembly_name", "task_id", "component_name", "namespace"])'
- result = self.client.query(flux_query)
- result.values.map(&:records).flatten.map(&:values)
- rescue => error
- fail error
- end
+ check_params_hash(params_hash)
+ flux_query = 'from(bucket:"' + client.connection_parameters[:bucket] + '") |> range(start:-5) |> filter(fn: (r) => r._measurement == "' + name.to_s + '")' + flux_filter(params_hash) + ' |> last()' + '|> drop(columns: ["_start", "_stop", "_field", "_measurement", "attribute_name", "assembly_name", "task_id", "component_name", "namespace"])'
+ result = self.client.query(query: flux_query)
+ result.values.map(&:records).flatten.map(&:values)
+ rescue => e
+ raise "Failed while getting last attribute point. Error: #{e}"
+ end
+
+ def get_required_tags(namespace, component_name, assembly_name, attribute_name)
+ required_tags = {
+ namespace: namespace,
+ component_name: component_name,
+ assembly_name: assembly_name,
+ attribute_name: attribute_name,
+ task_id: "1"
+ }
  end

  protected
@@ -90,7 +98,7 @@ module DTK::State
  fail "Parameter '#{name}' has an illegal type, legal types are #{LEGAL_TAG_CLASSES.join(', ')}"
  end
  end
-
+
  end
  end
  end
@@ -8,6 +8,12 @@ module DTK::State
  write_point(value, checked_params_hash, timestamp)
  end

+ def get_correlator_type(entrypoint)
+ {
+ correlator_type: entrypoint.split("/").last.split(".")[0]
+ }
+ end
+
  protected

  def required_params
@@ -8,12 +8,23 @@ module DTK::State
  write_point(value, checked_params_hash, timestamp)
  end

+ def get_required_tags(event_id, pod_name, pod_namespace, component_name, attribute_name, task_id)
+ {
+ event_id: event_id,
+ pod_name: pod_name,
+ pod_namespace: pod_namespace,
+ component_name: component_name,
+ attribute_name: attribute_name,
+ task_id: task_id
+ }
+ end
+
  protected

  def required_params
  [:event_id, :pod_name, :pod_namespace, :component_name, :attribute_name, :task_id]
  end
-
+
  end
  end
  end
@@ -8,12 +8,24 @@ module DTK::State
  write_point(value, checked_params_hash, timestamp)
  end

+ def get_required_tags(type, name, namespace, object_state, component_name, attribute_name, task_id)
+ {
+ type: type,
+ name: name,
+ namespace: namespace,
+ object_state: object_state,
+ component_name: component_name,
+ attribute_name: attribute_name,
+ task_id: task_id
+ }
+ end
+
  protected

  def required_params
  [:type, :name, :namespace, :object_state, :component_name, :attribute_name, :task_id]
  end
-
+
  end
  end
  end
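
Each measurement subclass (errors, events, states) now builds the tag hash matching its own required_params list, instead of the provider assembling it. For the states measurement the helper returns (values illustrative):

  get_required_tags("assembly", "my-assembly", "dtk", "running", "node", "state", "task-1")
  # => { type: "assembly", name: "my-assembly", namespace: "dtk", object_state: "running",
  #      component_name: "node", attribute_name: "state", task_id: "task-1" }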
@@ -0,0 +1,221 @@
+ module DTK::State
+ class Component::Attribute::Influxdb
+ class SemanticType
+ require_relative './client'
+ attr_reader :name, :crd_content, :namespace, :client
+ attr_accessor :content_to_write
+
+ def initialize(name, namespace)
+ @name = name
+ @namespace = namespace
+ @client = Client.new
+ @crd_content = get(name, namespace)
+ @content_to_write = []
+ end
+
+ # no namespace because semantictype instances are going to be unique in cluster
+ def get(name, namespace, opts = {})
+ begin
+ semantictype = ::DTK::CrdClient.get_kubeclient(opts).get_semantictype(name, namespace)
+ semantictype.spec[:openAPIV3Schema]
+ rescue => error
+ fail "SemanticType attribute with name '#{name}' not found on the cluster!. Error: #{error}"
+ end
+ end
+
+ def write_semantictype_inventory(inventory, component_id)
+ begin
+ get_influxdb_properties(inventory)
+ content_to_write.each do |point|
+ point[:tags].merge!({ component_id: component_id, attribute_name: @name })
+ @client.write_point({
+ name: point[:name],
+ tags: point[:tags],
+ fields: point[:fields],
+ time: (Time.new.to_f * 1000).to_i
+ })
+ end
+ "Inventory for attribute #{name} written to InfluxDB"
+ rescue => error
+ fail "#{name} inventory write failed. Error: #{error}"
+ end
+ end
+
+ def partial_write_update(component_and_attribute, path, field_name, field_value)
+ parent, child = validate_parameter(path)
+ component_id, attribute_name = component_and_attribute.split('/')
+ # getting previous value for given parameters
+ previous_value = { }
+ begin
+ flux_query = 'from(bucket:"' + @client.connection_parameters[:bucket] + '") |> range(start:-5) |> filter(fn:(r) => r._measurement == "' + attribute_name + "_" + child[:type] + '") |> filter(fn: (r) => r.parent == "' + parent[:name] + '") |> filter(fn: (r) => r.name == "' + child[:name] + '")|> last()'
+ result = @client.query(query: flux_query)
+ previous_value = result.values.map(&:records).flatten.map(&:values)
+ rescue => error
+ fail "Partial write could not be completed. Previous point for given parameters not found!"
+ end
+ update_current(previous_value[0], get_path_to_object(path), field_name, field_value)
+ end
+
+ private
+
+ def update_current(previous_value, path, field_name, field_value)
+ tags = { }
+ previous_value.each_pair do |key, value|
+ tags[key] = value if key[0..0] != "_" && key != "result" && key != "table"
+ end
+ fields = Hash.new
+ fields[field_name.to_sym] = field_value
+ validate_fields(get_partial_definition(path), fields)
+ @client.write_point({
+ name: previous_value["_measurement"],
+ tags: tags,
+ fields: fields,
+ time: (Time.new.to_f * 1000).to_i
+ })
+ "Partial write update successful"
+ end
+
+ def get_influxdb_properties(inventory, parent_type = [:top], parent_name = nil)
+ content_to_write = []
+ properties = { }
+ inventory.each_pair do |key, value|
+ if value.class.to_s == "Array"
+ inventory[key].each do |element|
+ get_influxdb_properties(element, parent_type.push(key), inventory[:name])
+ end
+ else
+ properties[key] = value
+ end
+ end
+ resolve_property(parent_type, parent_name, properties)
+ parent_type.pop
+ "Attribute successfully validated!"
+ end
+
+ def resolve_property(parent_type, parent_name, properties)
+ definition = get_partial_definition(parent_type)
+ request = get_tags_and_fields(definition, properties)
+ validate_request(definition, request)
+ request[:name] = name + "_" + parent_type.last.to_s
+ request[:tags][:parent] = parent_name unless parent_name.nil?
+ content_to_write.push(request)
+ end
+
+ def validate_request(partial_definition, request)
+ begin
+ validate_tags(partial_definition, request[:tags])
+ validate_fields(partial_definition, request[:fields])
+ rescue => error
+ fail error
+ end
+ end
+
+ def get_tags_and_fields(partial_definition, properties)
+ tags = { }
+ fields = { }
+ properties.each_pair do |key, value|
+ if partial_definition[key].nil?
+ fail "Property '#{key}' not found in the definition of attribute"
+ else
+ if partial_definition[key][:metric].nil? || partial_definition[key][:metric] == false
+ tags[key] = value
+ else
+ fields[key] = value
+ end
+ end
+ end
+ {
+ tags: tags,
+ fields: fields
+ }
+ end
+
+ def validate_fields(partial_definition, fields)
+
+ partial_definition.each_pair do |key, value|
+ next if key == :required || value[:metric] == (false || nil)
+
+ if fields[key].nil?
+ fail "Field #{key} is missing. Validation of request failed!"
+ elsif value[:type].capitalize != fields[key].class.to_s
+ fail "Defined type for SemanticType attribute property '#{key}' is #{value[:type].capitalize}, #{fields[key].class} provided"
+ end
+ end
+ end
+
+ def validate_tags(partial_definition, tags)
+ partial_definition.each_pair do |key, value|
+ next if key == :required || value[:metric] == true
+
+ if tags[key].nil?
+ if value[:default].nil?
+ fail "Property #{key} is missing. Validation of request failed!"
+ else
+ tags[key] = value[:default]
+ end
+ else
+ type = tags[key].class
+ type = "Boolean" if type == TrueClass || type == FalseClass
+ if value[:type].capitalize == type.to_s
+ next
+ else
+ fail "Defined type for SemanticType attribute property '#{key}' is #{value[:type].capitalize}, #{type} provided"
+ end
+ end
+ end
+ end
+
+ def get_partial_definition(path)
+ i = 0
+ definition = { }
+ semantictype_crd = crd_content[:properties]
+ while i < path.length
+ if path[i].to_sym == :top
+ semantictype_crd.each_pair do |key, value|
+ if key == :required
+ definition[key] = value
+ else
+ definition[key] = value if value[:type] != "array"
+ end
+ end
+ else
+ definition = {}
+ definition[:required] = semantictype_crd[path[i].to_sym][:items][:required]
+ semantictype_crd[path[i].to_sym][:items][:properties].each_pair do |key, value|
+ definition[key] = value if value[:type] != "array"
+ end
+ semantictype_crd = semantictype_crd[path[i].to_sym][:items][:properties]
+ end
+ i+=1
+ end
+ definition
+ end
+
+ def get_path_to_object(parameter)
+ path = ["top"]
+ array = parameter.split('/')
+ array.each do |element|
+ path.push(element.split(':')[1])
+ end
+ path
+ end
+
+ def validate_parameter(parameter)
+ array_of_parameters = []
+ begin
+ parameter.split('/').each_with_index do |param, index|
+ name, type = param.split(':')
+ fail unless name && type
+ array_of_parameters.push({
+ name: name,
+ type: type
+ })
+ end
+ array_of_parameters
+ rescue => error
+ fail "Could not resolve parameter '#{parameter}'. It should be in format: 'parent:type/child:type'"
+ end
+ end
+ end
+ end
+ end
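
The new SemanticType class loads a SemanticType CRD's openAPIV3Schema, validates an inventory hash against it (non-metric properties become tags, metric properties become fields), and writes one point per nested element. A hedged usage sketch; the CRD name, namespace and inventory below are made up:

  st = DTK::State::Component::Attribute::Influxdb::SemanticType.new("switch", "dtk")
  inventory = {
    name: "switch-1",
    vendor: "acme",
    ports: [
      { name: "eth0", speed: 1000 },
      { name: "eth1", speed: 10000 }
    ]
  }
  st.write_semantictype_inventory(inventory, "component-123")

  # A single nested field can later be updated; the path argument must follow 'parent:type/child:type', e.g. (hypothetical):
  # st.partial_write_update("component-123/switch", "switch-1:ports/eth0:ports", "speed", 25000)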
@@ -2,6 +2,8 @@ module DTK::State
  class ComponentDef
  require_relative 'component_def/attribute_type_info'

+ COMPONENT_DEF_CRD_VERSION = ENV["COMPONENT_DEF_CRD_VERSION"]
+
  attr_reader :name, :namespace, :executable_actions, :attribute_type_info

  def initialize(namespace, name, content)
@@ -12,6 +14,7 @@ module DTK::State
  end

  def self.get(namespace, name, opts = {})
+ opts[:apiVersion] = COMPONENT_DEF_CRD_VERSION
  crd_component_def = ::DTK::CrdClient.get_kubeclient(opts).get_componentdef(name, namespace)
  ComponentDef.new(namespace, name, crd_component_def)
  end
@@ -1,5 +1,7 @@
  module DTK::State
  class CrdAssembly
+ ASSEMBLY_CRD_VERSION = ENV["ASSEMBLY_CRD_VERSION"]
+
  attr_reader :name, :namespace, :crd_content, :components, :references

  def initialize(namespace, name, crd_content)
@@ -11,7 +13,7 @@ module DTK::State
  end

  def self.get(namespace, name, opts = {})
- # crd_component = ::DTK::CrdClient.instance.kubeclient.get_component(name, namespace)
+ opts[:apiVersion] = ASSEMBLY_CRD_VERSION
  crd_assembly = ::DTK::CrdClient.get_kubeclient(opts).get_assembly(name, namespace)
  CrdAssembly.new(namespace, name, crd_assembly)
  end
@@ -2,6 +2,8 @@ module DTK::State
  class WorkflowInstance
  require_relative 'workflow_instance/attribute_type_info'

+ WORKFLOW_INSTANCE_CRD_VERSION = ENV["WORKFLOW_INSTANCE_CRD_VERSION"]
+
  attr_reader :name, :namespace, :assembly, :workflow_template, :attributes, :workflow, :attribute_type_info

  def initialize(namespace, name, crd_content)
@@ -15,6 +17,7 @@ module DTK::State
  end

  def self.get(namespace, name, opts = {})
+ opts[:apiVersion] = WORKFLOW_INSTANCE_CRD_VERSION
  workflow_instance = ::DTK::CrdClient.get_kubeclient(opts).get_workflow_instance(name, namespace)
  WorkflowInstance.new(namespace, name, workflow_instance)
  end
@@ -26,13 +29,58 @@ module DTK::State

  def self.get_action_attributes(namespace, name, action_id, opts = {})
  workflow_instance = get(namespace, name, opts)
- action = workflow_instance.find_action(action_id)
+ action = WorkflowInstance.find_action(action_id, workflow_instance.workflow)
  return nil unless action
  attributes = action[:attributes] || {}
  attributes.to_h
  end

- def find_action(id, workflow = @workflow)
+ def self.update_action_level_result_attributes(namespace, name, attributes, action_id, opts = {})
+ return "Dynamic attributes do not exist for action with id #{@action_id}, nothing to update" if attributes.nil? || attributes.empty?
+ attributes.delete_if { |key, value| value.nil? || value.to_s.strip == '' }
+ opts[:apiVersion] = WORKFLOW_INSTANCE_CRD_VERSION
+ workflow_instance = ::DTK::CrdClient.get_kubeclient(opts).get_workflow_instance(name, namespace)
+ workflow = workflow_instance[:spec][:workflow]
+
+ action = WorkflowInstance.find_action(action_id, workflow)
+ action[:attributes] = {} if !action[:attributes]
+ attributes.each do |attr_name, attr_val|
+ action[:attributes][attr_name.to_sym] = {} unless action[:attributes][attr_name.to_sym]
+ unless action[:attributes][attr_name.to_sym][:hidden]
+ if attr_val.is_a? Hash
+ action[:attributes][attr_name.to_sym][:value] = attr_val[:value] || attr_val
+ else
+ action[:attributes][attr_name.to_sym][:value] = attr_val
+ end
+ end
+ end
+ ::DTK::CrdClient.get_kubeclient(opts).update_workflow_instance(workflow_instance)
+ end
+
+ def self.patchError!(patches, message, action_index_steps)
+ errorPatch = {
+ "op" => "add",
+ "path" => "/spec/status/steps/#{action_index_steps}/errorMsg",
+ "value" => message
+ }
+ patches << errorPatch
+ end
+
+ def self.update_action_status(namespace, name, parent_id, action_id, status, error_message = "", opts = {})
+ opts[:apiVersion] = WORKFLOW_INSTANCE_CRD_VERSION
+ workflow_instance = ::DTK::CrdClient.get_kubeclient(opts).get_workflow_instance(name, namespace)
+ steps = workflow_instance[:spec][:status][:steps]
+ action_index_steps = steps.find_index { |action| action[:id].eql? action_id }
+ patch = [{
+ "op" => "replace",
+ "path" => "/spec/status/steps/#{action_index_steps}/state",
+ "value" => status
+ }]
+ patchError!(patch, error_message, action_index_steps) unless error_message.empty? || error_message.nil?
+ ::DTK::CrdClient.get_kubeclient(opts).json_patch_workflow_instance(name, patch, namespace)
+ end
+
+ def self.find_action(id, workflow = @workflow)
  action = nil
  subtasks = workflow[:subtasks]
  subtasks.each do |subtask|
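
update_action_status locates the step by id under spec.status.steps and submits a JSON Patch against the WorkflowInstance CRD, adding an errorMsg operation only when an error message is supplied. The patch it builds has this shape (step index and values illustrative):

  [
    { "op" => "replace", "path" => "/spec/status/steps/2/state",    "value" => "failed" },
    { "op" => "add",     "path" => "/spec/status/steps/2/errorMsg", "value" => "task exited with code 1" }
  ]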
@@ -0,0 +1,3 @@
+ module Utils
+ require_relative 'utils/log'
+ end
@@ -0,0 +1,22 @@
+ module Utils
+ module Log
+ require 'logger'
+
+ def self.instance
+ @instance ||= Logger.new('/proc/1/fd/1', formatter: proc { |severity, datetime, progname, msg|
+ orange_color = "\x1b[33m"
+ white_color = "\x1b[37m"
+ red_color = "\x1b[31m"
+
+ date_format = datetime.strftime("%Y-%m-%d %H:%M:%S:%L")
+ if severity == "INFO"
+ "#{orange_color}[#{date_format}] - #{white_color}#{msg}\n"
+ elsif severity == "WARN"
+ "#{orange_color}[#{date_format}] [WARNING] - #{msg}\n"
+ elsif severity == "ERROR"
+ "#{red_color}[#{date_format}] [ERROR] - #{msg}\n"
+ end
+ })
+ end
+ end
+ end
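
Utils::Log exposes a memoized Logger that writes straight to the container's stdout (/proc/1/fd/1) with colored, timestamped lines, and the DTK::State entry file now requires it so it is available wherever the gem is loaded. Usage sketch (messages are examples only):

  Utils::Log.instance.info("workflow instance updated")
  Utils::Log.instance.warn("retrying InfluxDB write")
  Utils::Log.instance.error("failed to reach InfluxDB")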
@@ -1,6 +1,6 @@
  Gem::Specification.new do |spec|
  spec.name = 'persistence-providers'
- spec.version = '0.0.3.5'
+ spec.version = '0.0.4.1'
  spec.author = 'Reactor8'
  spec.email = 'support@reactor8.com'
  spec.description = %q{Persistence providers plugin}
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: persistence-providers
  version: !ruby/object:Gem::Version
- version: 0.0.3.5
+ version: 0.0.4.1
  platform: ruby
  authors:
  - Reactor8
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-05-12 00:00:00.000000000 Z
+ date: 2020-05-27 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: kubeclient
@@ -70,6 +70,7 @@ files:
  - lib/state/component/providers/influxdb/measurement/errors.rb
  - lib/state/component/providers/influxdb/measurement/events.rb
  - lib/state/component/providers/influxdb/measurement/states.rb
+ - lib/state/component/providers/influxdb/semantictype.rb
  - lib/state/component/providers/kube_crd.rb
  - lib/state/component_def.rb
  - lib/state/component_def/attribute_type_info.rb
@@ -78,6 +79,8 @@ files:
  - lib/state/executable_action/attribute_type_info.rb
  - lib/state/workflow_instance.rb
  - lib/state/workflow_instance/attribute_type_info.rb
+ - lib/utils.rb
+ - lib/utils/log.rb
  - persistence-providers.gemspec
  - test-destroy-influxdb.rb
  - test-influxdb.rb