persistence-providers 0.0.3.7 → 0.0.6

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: bb4b1e1715f9e1ed7b1174ff99a9adc00a13c5c0f56be3ed0d974b394fcf166d
4
- data.tar.gz: ec5385e7dea1fac44d2c9d21ef902829a4fdee986d76a9f565ec3d6043d26f3d
3
+ metadata.gz: 0a15ad06be0a52e1e087f50377fa1669846e8eb2d01ae40776a07543b8440b4b
4
+ data.tar.gz: 7f68ed470f1ca6ab2bee9b7bbf10d7003059acafbae22bf04351a52653416b0d
5
5
  SHA512:
6
- metadata.gz: 26dcc9ac221ec5be80eb8114c50ba82d67dfaec44c809dc2435d1bb062d23de6d6faf80e2f4ff1130ad7db8ed4a7d318edd1e220340fe0fda17fd89186089868
7
- data.tar.gz: 96207adcb7b8914850b27e6bc3e991221b1180354eb512df0db8595695cb998824f20be0c3ea86fef8ed00a1f8dfcd7fdc38bc506037a5194525a6ae4fcfd03b
6
+ metadata.gz: ede15c848020024c37f2a79608c96b8591691032c97d0fd10e39b964dd5fddc94f7df5e9d31383a798dcdaaeae8dbaffd8d966214ed332f872fe43bfb2ee875c
7
+ data.tar.gz: 9e7a4383fbcb11b4402b3f8cd449b8aab48f724a6cbe7ba020aa878ee44e3ee90f6dbfdbd18fb2f8fda04e3cbb0154a8fcd02a7a9d7f6f415c816eefb1ee0d2b
@@ -1,10 +1,16 @@
1
1
  require 'kubeclient'
2
2
  require 'celluloid/io'
3
- require 'singleton'
3
+ # require 'singleton'
4
4
 
5
5
  module DTK
6
6
  class CrdClient
7
- include Singleton
7
+ # include Singleton
8
+ DEFAULT_API_VERSION = 'v1alpha1'
9
+
10
+ # COMPONENT_DEF_CRD_VERSION = ENV["COMPONENT_DEF_CRD_VERSION"]
11
+ # ASSEMBLY_CRD_VERSION = ENV["ASSEMBLY_CRD_VERSION"]
12
+ # WORKFLOW_CRD_VERSION = ENV["WORKFLOW_CRD_VERSION"]
13
+ # WORKFLOW_INSTANCE_CRD_VERSION = ENV["WORKFLOW_INSTANCE_CRD_VERSION"]
8
14
 
9
15
  attr_accessor :kubeclient
10
16
 
@@ -12,17 +18,26 @@ module DTK
12
18
  if @kubeclient = opts[:kubeclient]
13
19
  @kubeclient
14
20
  else
15
- ::DTK::CrdClient.instance.kubeclient
21
+ kubeclient_version(opts)
16
22
  end
17
23
  end
18
24
 
19
- # opts can have keys
20
- # kubernetes_client - already instantiated kubernetes client
21
- def initialize(opts = {})
22
- if @kubeclient = opts[:kubernetes_client]
23
- return @kubeclient
25
+ def self.kubeclient_version(opts = {})
26
+ version = opts[:apiVersion] || DEFAULT_API_VERSION
27
+
28
+ if existing_version = KubeclientVersions[version]
29
+ return existing_version
30
+ else
31
+ new_instance = new(version).kubeclient
32
+ KubeclientVersions[version] = new_instance
33
+ new_instance
24
34
  end
35
+ end
36
+ KubeclientVersions = {}
25
37
 
38
+ # opts can have keys
39
+ # kubernetes_client - already instantiated kubernetes client
40
+ def initialize(apiVersion)
26
41
  ssl_options = {}
27
42
  auth_options = { bearer_token_file: '/var/run/secrets/kubernetes.io/serviceaccount/token' }
28
43
 
@@ -37,7 +52,7 @@ module DTK
37
52
 
38
53
  @kubeclient = Kubeclient::Client.new(
39
54
  'https://kubernetes.default.svc/apis/',
40
- 'dtk.io/v1alpha1',
55
+ "dtk.io/#{apiVersion}",
41
56
  auth_options: auth_options,
42
57
  ssl_options: ssl_options,
43
58
  socket_options: socket_options
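
The hunks above drop the `Singleton` mixin in favour of a per-`apiVersion` cache (`KubeclientVersions`), so callers can pin the CRD version they need. A minimal usage sketch, assuming `DTK::CrdClient` is loaded and the code runs in a pod with the default service-account token; the `v1alpha2` string is purely illustrative:

```ruby
# Clients are memoized per CRD apiVersion instead of one global Singleton.
default_client = DTK::CrdClient.get_kubeclient({})                      # falls back to DEFAULT_API_VERSION ('v1alpha1')
pinned_client  = DTK::CrdClient.get_kubeclient(apiVersion: 'v1alpha2')  # illustrative version string

# Repeated lookups for the same version reuse the cached Kubeclient::Client.
DTK::CrdClient.get_kubeclient(apiVersion: 'v1alpha2').equal?(pinned_client)  # => true

# An already-built client can still be passed straight through.
DTK::CrdClient.get_kubeclient(kubeclient: default_client)                    # => default_client
```
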
@@ -1,10 +1,12 @@
1
1
  module DTK
2
2
  module State
3
+ require_relative 'utils'
3
4
  require_relative 'crd_client'
4
5
  require_relative 'state/crd_assembly'
5
6
  require_relative 'state/component'
6
7
  require_relative 'state/component_def'
7
8
  require_relative 'state/executable_action'
8
9
  require_relative 'state/workflow_instance'
10
+ require_relative 'state/workflow'
9
11
  end
10
12
  end
@@ -3,12 +3,14 @@ module DTK::State
3
3
  require_relative 'component/attribute'
4
4
 
5
5
  attr_reader :name, :parent, :component_def
6
+ attr_accessor :raw_attributes
6
7
 
7
8
  def initialize(name, component_content, parent, opts = {})
8
9
  @name = name
9
10
  # @parent = parent
10
11
  @component_defs = parent.references.component_def
11
12
  @component_def = get_component_def(opts)
13
+ @raw_attributes = component_content[:attributes] || {}
12
14
 
13
15
  @attribute_objs = Attribute.create_from_kube_hash(component_content[:attributes] || {})# convert_to_attribute_objects(component_content[:attributes])
14
16
  end
@@ -17,11 +19,50 @@ module DTK::State
17
19
  # task_id
18
20
  def self.get(crd_assembly_namespace, crd_assembly_name, component_name, opts = {})
19
21
  crd_assembly = CrdAssembly.get(crd_assembly_namespace, crd_assembly_name, opts)
20
- if matching_component = crd_assembly.components.find{ |cmp| cmp.to_hash.keys.first.to_s == component_name }
21
- Component.new(component_name, matching_component[component_name], crd_assembly, opts)
22
+ if matching_component = Component.find_matching_component(crd_assembly, component_name)
23
+ Component.new(component_name, Component.get_component_content(matching_component, component_name), crd_assembly, opts)
22
24
  end
23
25
  end
24
26
 
27
+ def self.get_with_influx_data(namespace, assembly_name, component_name, opts = {})
28
+ component = get(namespace, assembly_name, component_name, opts)
29
+ return unless component
30
+ attributes = component.raw_attributes.to_hash
31
+
32
+ attr_type_info = component.component_def.attribute_type_info
33
+ attr_type_info.each do |attr_info|
34
+ if attr_info.temporal
35
+ attribute_name = attr_info.name.to_sym
36
+ influxdb = ::DTK::State::Component::Attribute::Influxdb.new(:attributes)
37
+ influxdb_attribute = influxdb.get(namespace, component_name, assembly_name, attribute_name, opts)
38
+ if valid_attribute = influxdb_attribute.first
39
+ value = valid_attribute['_value']
40
+ if attributes[attribute_name]
41
+ if attributes[attribute_name].is_a?(String)
42
+ attributes[attribute_name] = value
43
+ else
44
+ attributes[attribute_name][:value] = value
45
+ end
46
+ end
47
+ end
48
+ end
49
+ end
50
+
51
+ component.raw_attributes = attributes
52
+ component
53
+ end
54
+
55
+ def to_hash
56
+ {
57
+ name: @name,
58
+ component_def: {
59
+ name: @component_def.name,
60
+ namespace: @component_def.namespace
61
+ },
62
+ attributes: @raw_attributes.to_hash
63
+ }
64
+ end
65
+
25
66
  # opts can have keys
26
67
  # task_id
27
68
  # format:
@@ -44,16 +85,64 @@ module DTK::State
44
85
  end
45
86
  end
46
87
 
88
+ def attribute_metadata
89
+ attributes = @raw_attributes.to_hash
90
+ attr_type_info = @component_def.attribute_type_info
91
+ attribute_metadata = {}
92
+
93
+ attr_type_info.each do |attr_info|
94
+ attr_info_hash = attr_info.to_hash
95
+ attribute_name = attr_info_hash[:name].to_sym
96
+
97
+ if attribute = attributes[attribute_name]
98
+ if attribute.is_a?(String)
99
+ attribute = { value: attribute }
100
+ end
101
+
102
+ attribute_metadata[attribute_name] = attr_info_hash.merge(attribute)
103
+ end
104
+ end
105
+
106
+ attribute_metadata
107
+ end
108
+
47
109
  def attribute_values
48
- attribute_with_values = []
49
- @content[:attributes].each do |name, content|
50
- attribute_with_values << { name => content[:value] }
110
+ attribute_with_values = {}
111
+ @raw_attributes.each do |name, content|
112
+ attribute_with_values.merge!(name => content[:value])
51
113
  end
52
114
  attribute_with_values
53
115
  end
54
116
 
117
+ def self.create_from_kube_array(kube_components, crd_assembly)
118
+ kube_components.map do |component_hash|
119
+ if component_hash.is_a?(String)
120
+ component_name = component_hash
121
+ component_content = {}
122
+ else
123
+ component_name = component_hash.to_hash.keys.first
124
+ component_content = component_hash[component_name]
125
+ end
126
+ Component.new(component_name, component_content, crd_assembly)
127
+ end
128
+ end
129
+
55
130
  private
56
131
 
132
+ def self.get_component_content(matching_component, component_name)
133
+ return matching_component.is_a?(String) ? {} : matching_component[component_name]
134
+ end
135
+
136
+ def self.find_matching_component(assembly, component_name)
137
+ assembly.components.find do |cmp|
138
+ if cmp.is_a? String
139
+ cmp == component_name
140
+ else
141
+ cmp.to_hash.keys.first.to_s == component_name
142
+ end
143
+ end
144
+ end
145
+
57
146
  def get_component_def(opts = {})
58
147
  destructured_component = destructure_component_full_name
59
148
  component_def = @component_defs.find { |component_def| component_def[:name] == destructured_component[:component_def_name] }
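
A rough sketch of the `Component` helpers added in the hunks above (`get_with_influx_data`, `to_hash`, `attribute_metadata`, `attribute_values`); namespace, assembly, and component names are placeholders and the output shapes are abbreviated:

```ruby
component = DTK::State::Component.get_with_influx_data('default', 'my-assembly', 'my-component')

component.to_hash
# => { name: 'my-component',
#      component_def: { name: '...', namespace: 'default' },
#      attributes: { ... } }    # temporal attributes carry the latest InfluxDB value

component.attribute_values     # => { attribute_name => value, ... }
component.attribute_metadata   # values merged with type info from the component def
```
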
@@ -4,6 +4,7 @@ module DTK::State
4
4
  require_relative('influxdb/client')
5
5
  require_relative('influxdb/measurement')
6
6
  require_relative('influxdb/semantictype')
7
+ require_relative('../../../utils/log')
7
8
 
8
9
  attr_reader :client, :measurement
9
10
 
@@ -13,88 +14,51 @@ module DTK::State
13
14
  end
14
15
 
15
16
  def get(namespace, component_name, assembly_name, attribute_name, opts = {})
16
- required_tags = get_required_tags(namespace, component_name, assembly_name, attribute_name)
17
- if opts[:provider] == "correlation"
18
- errors = client.measurement_helper(:errors)
19
- required_tags.merge!({ correlator_type: opts[:entrypoint].split("/").last.split(".")[0] })
20
- errors.get_last_point(required_tags)
21
- elsif
22
- last_value = measurement.get_last_point(required_tags)
23
- last_value
24
- end
17
+ required_tags = measurement.get_required_tags(namespace, component_name, assembly_name, attribute_name)
18
+ required_tags.merge! measurement.get_correlator_type(opts[:entrypoint]) if opts[:provider] == "correlation"
19
+ measurement.get_last_point(required_tags)
20
+ rescue => e
21
+ raise "Error happened while getting attribute from InfluxDB.\nError: #{e}"
25
22
  end
26
23
 
27
24
  def write(namespace, component_name, assembly_name, attribute_name, value, opts = {}, timestamp = nil)
28
- if opts[:provider] == "correlation"
29
- errors = client.measurement_helper(:errors)
30
- required_tags = get_required_tags(namespace, component_name, assembly_name, attribute_name)
31
- required_tags.merge!({ correlator_type: opts[:entrypoint].split("/").last.split(".")[0] })
32
- errors.write(value.to_s, required_tags, timestamp)
33
- elsif
34
- required_tags = get_required_tags(namespace, component_name, assembly_name, attribute_name)
35
- measurement.write(value, required_tags, timestamp)
36
- end
25
+ required_tags = measurement.get_required_tags(namespace, component_name, assembly_name, attribute_name)
26
+ required_tags.merge! measurement.get_correlator_type(opts[:entrypoint]) if opts[:provider] == "correlation"
27
+ measurement.write(value.to_s, required_tags, timestamp)
28
+ rescue => e
29
+ raise "Error happened while writing attribute into InfluxDB.\Error: #{e}"
37
30
  end
38
31
 
39
32
  def write_event(event_id, pod_name, pod_namespace, event_source, event_message, component_name, attribute_name, task_id, timestamp)
40
- begin
41
- fail "Bad timestamp input, write operation wont be completed" if timestamp > Time.new
42
- value_to_write = { event_source: event_source, event_message: event_message }
43
- required_tags = {
44
- event_id: event_id,
45
- pod_name: pod_name,
46
- pod_namespace: pod_namespace,
47
- component_name: component_name,
48
- attribute_name: attribute_name,
49
- task_id: task_id
50
- }
51
- measurement.write(value_to_write.to_s, required_tags, timestamp)
52
- rescue => error
53
- fail error
54
- end
33
+ fail "Bad timestamp input, write operation wont be completed" if timestamp > Time.new
34
+ value_to_write = { event_source: event_source, event_message: event_message }
35
+ required_tags = measurement.get_required_tags(event_id, pod_name, pod_namespace, component_name, attribute_name, task_id)
36
+ measurement.write(value_to_write.to_s, required_tags, timestamp)
37
+ rescue => e
38
+ raise "Error happened while writing event into InfluxDB.\nError: #{e}"
55
39
  end
56
40
 
57
41
  def get_event(event_id, pod_name, pod_namespace, component_name, attribute_name, task_id)
58
- required_tags = {
59
- event_id: event_id,
60
- pod_name: pod_name,
61
- pod_namespace: pod_namespace,
62
- component_name: component_name,
63
- attribute_name: attribute_name,
64
- task_id: task_id
65
- }
42
+ required_tags = measurement.get_required_tags(event_id, pod_name, pod_namespace, component_name, attribute_name, task_id)
66
43
  last_point = measurement.get_last_point(required_tags)
44
+ rescue => e
45
+ raise "Error happened while getting event from InfluxDB.\nError: #{e}"
67
46
  end
68
47
 
69
48
  def write_state(type, name, namespace, object_state, spec, status, component_name, attribute_name, task_id, timestamp)
70
- begin
71
- fail "Bad timestamp input, write operation wont be completed" if timestamp > Time.new
72
- value_to_write = { spec: spec, status: status }
73
- required_tags = {
74
- type: type,
75
- name: name,
76
- namespace: namespace,
77
- object_state: object_state,
78
- component_name: component_name,
79
- attribute_name: attribute_name,
80
- task_id: task_id
81
- }
82
- measurement.write(value_to_write.to_s, required_tags, timestamp)
83
- rescue => error
84
- fail error
85
- end
49
+ raise "Bad timestamp input, write operation to InfluxDB wont be completed" if timestamp > Time.new
50
+ value_to_write = { spec: spec, status: status }
51
+ required_tags = measurement.get_required_tags(type, name, namespace, object_state, component_name, attribute_name, task_id)
52
+ measurement.write(value_to_write.to_s, required_tags, timestamp)
53
+ rescue => e
54
+ raise "Error happened while writing state into InfluxDB.\nError: #{e}"
86
55
  end
87
-
88
- private
89
56
 
90
- def get_required_tags(namespace, component_name, assembly_name, attribute_name)
91
- required_tags = {
92
- namespace: namespace,
93
- component_name: component_name,
94
- assembly_name: assembly_name,
95
- attribute_name: attribute_name,
96
- task_id: "1"
97
- }
57
+ def get_state(type, name, namespace, object_state, component_name, attribute_name, task_id)
58
+ required_tags = measurement.get_required_tags(type, name, namespace, object_state, component_name, attribute_name, task_id)
59
+ measurement.get_last_point(required_tags)
60
+ rescue => e
61
+ raise "Error happened while getting state from InfluxDB.\nError: #{e}"
98
62
  end
99
63
  end
100
64
  end
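
The provider refactor above moves tag construction into the measurement classes (`get_required_tags`, `get_correlator_type`) and replaces the `begin`/`fail` blocks with method-level `rescue` clauses that re-raise with InfluxDB-specific messages. A minimal read/write sketch, assuming the `Client` can reach InfluxDB; names and values are placeholders, and the correlation path (`provider: "correlation"` with an `:entrypoint`) is omitted:

```ruby
influxdb = DTK::State::Component::Attribute::Influxdb.new(:attributes)

# namespace, component, assembly, attribute and a fixed task_id become the point's tags
influxdb.write('default', 'my-component', 'my-assembly', 'cpu_usage', '0.42')

influxdb.get('default', 'my-component', 'my-assembly', 'cpu_usage')
# => [{ '_value' => '0.42', ... }]   # last point for those tags, or a re-raised error on failure
```
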
@@ -10,30 +10,24 @@ module DTK::State
10
10
  end
11
11
 
12
12
  def query(query_expression)
13
- begin
14
- query_api = self.connection.create_query_api
15
- query_api.query(query_expression)
16
- rescue => error
17
- fail "Failed in query"
18
- end
13
+ query_api = self.connection.create_query_api
14
+ query_api.query(query_expression)
15
+ rescue => e
16
+ raise "Failed while processing flux query!. Error: #{e}"
19
17
  end
20
18
 
21
19
  def write_point(data)
22
- begin
23
- write_api = self.connection.create_write_api
24
- write_api.write(data: data)
25
- rescue => error
26
- fail error
27
- end
20
+ write_api = self.connection.create_write_api
21
+ write_api.write(data: data)
22
+ rescue => e
23
+ raise e
28
24
  end
29
25
 
30
26
  def measurement_helper(measurement_name)
31
- begin
32
- klass = Measurement.const_get(measurement_name.to_sym.capitalize)
33
- klass.new(measurement_name, self)
34
- rescue => error
35
- fail error
36
- end
27
+ klass = Measurement.const_get(measurement_name.to_sym.capitalize)
28
+ klass.new(measurement_name, self)
29
+ rescue => e
30
+ raise e
37
31
  end
38
32
 
39
33
  attr_reader :connection_parameters, :connection
@@ -53,18 +47,21 @@ module DTK::State
53
47
  org: params[:org],
54
48
  bucket: params[:bucket]
55
49
  }
50
+ rescue => e
51
+ raise "Problem happened while processing InfluxDB connection parameters. Error: #{e}"
56
52
  end
57
53
 
58
54
  def return_connection(connection_parameters)
59
- begin
60
- InfluxDB2::Client.new(connection_parameters[:url], connection_parameters[:token],
61
- bucket: connection_parameters[:bucket],
62
- org: connection_parameters[:org],
63
- precision: InfluxDB2::WritePrecision::MILLISECOND,
64
- use_ssl: false)
65
- rescue => error
66
- fail "Error: #{error}"
67
- end
55
+ client = InfluxDB2::Client.new(connection_parameters[:url], connection_parameters[:token],
56
+ bucket: connection_parameters[:bucket],
57
+ org: connection_parameters[:org],
58
+ precision: InfluxDB2::WritePrecision::MILLISECOND,
59
+ use_ssl: false)
60
+ query_api = client.create_query_api
61
+ query_api.query(query: 'from(bucket:"' + connection_parameters[:bucket] + '") |> range(start: -5)')
62
+ client
63
+ rescue => e
64
+ raise "Connection with InfluxDB could not be established. #{e}"
68
65
  end
69
66
  end
70
67
  end
@@ -20,14 +20,22 @@ module DTK::State
20
20
  end
21
21
 
22
22
  def get_last_point(params_hash = {})
23
- begin
24
- check_params_hash(params_hash)
25
- flux_query = 'from(bucket:"' + client.connection_parameters[:bucket] + '") |> range(start:-5) |> filter(fn: (r) => r._measurement == "' + name.to_s + '")' + flux_filter(params_hash) + ' |> last()' + '|> drop(columns: ["_start", "_stop", "_field", "_measurement", "attribute_name", "assembly_name", "task_id", "component_name", "namespace"])'
26
- result = self.client.query(query: flux_query)
27
- result.values.map(&:records).flatten.map(&:values)
28
- rescue => error
29
- fail error
30
- end
23
+ check_params_hash(params_hash)
24
+ flux_query = 'from(bucket:"' + client.connection_parameters[:bucket] + '") |> range(start:-5) |> filter(fn: (r) => r._measurement == "' + name.to_s + '")' + flux_filter(params_hash) + ' |> last()' + '|> drop(columns: ["_start", "_stop", "_field", "_measurement", "attribute_name", "assembly_name", "task_id", "component_name", "namespace"])'
25
+ result = self.client.query(query: flux_query)
26
+ result.values.map(&:records).flatten.map(&:values)
27
+ rescue => e
28
+ raise "Failed while getting last attribute point. Error: #{e}"
29
+ end
30
+
31
+ def get_required_tags(namespace, component_name, assembly_name, attribute_name)
32
+ required_tags = {
33
+ namespace: namespace,
34
+ component_name: component_name,
35
+ assembly_name: assembly_name,
36
+ attribute_name: attribute_name,
37
+ task_id: "1"
38
+ }
31
39
  end
32
40
 
33
41
  protected
@@ -90,7 +98,7 @@ module DTK::State
90
98
  fail "Parameter '#{name}' has an illegal type, legal types are #{LEGAL_TAG_CLASSES.join(', ')}"
91
99
  end
92
100
  end
93
-
101
+
94
102
  end
95
103
  end
96
104
  end
@@ -2,7 +2,6 @@ module DTK::State
2
2
  class Component::Attribute::Influxdb
3
3
  class Measurement
4
4
  class Attributes < self
5
-
6
5
  def write(value, params_hash = {}, timestamp)
7
6
  checked_params_hash = check_params_hash(params_hash)
8
7
  write_point(value, checked_params_hash, timestamp)
@@ -11,10 +10,9 @@ module DTK::State
11
10
  protected
12
11
 
13
12
  def required_params
14
- [:namespace, :component_name, :assembly_name, :attribute_name, :task_id]
13
+ %i[namespace component_name assembly_name attribute_name task_id]
15
14
  end
16
-
17
- end
15
+ end
18
16
  end
19
17
  end
20
18
  end
@@ -8,13 +8,18 @@ module DTK::State
8
8
  write_point(value, checked_params_hash, timestamp)
9
9
  end
10
10
 
11
+ def get_correlator_type(entrypoint)
12
+ {
13
+ correlator_type: entrypoint.split('/').last.split('.')[0]
14
+ }
15
+ end
16
+
11
17
  protected
12
18
 
13
19
  def required_params
14
- [:namespace, :component_name, :assembly_name, :task_id, :attribute_name, :task_id, :correlator_type]
20
+ %i[namespace component_name assembly_name task_id attribute_name correlator_type]
15
21
  end
16
-
17
- end
22
+ end
18
23
  end
19
24
  end
20
- end
25
+ end
@@ -8,13 +8,23 @@ module DTK::State
8
8
  write_point(value, checked_params_hash, timestamp)
9
9
  end
10
10
 
11
+ def get_required_tags(event_id, pod_name, pod_namespace, component_name, attribute_name, task_id)
12
+ {
13
+ event_id: event_id,
14
+ pod_name: pod_name,
15
+ pod_namespace: pod_namespace,
16
+ component_name: component_name,
17
+ attribute_name: attribute_name,
18
+ task_id: task_id
19
+ }
20
+ end
21
+
11
22
  protected
12
23
 
13
24
  def required_params
14
- [:event_id, :pod_name, :pod_namespace, :component_name, :attribute_name, :task_id]
25
+ %i[event_id pod_name pod_namespace component_name attribute_name task_id]
15
26
  end
16
-
17
- end
27
+ end
18
28
  end
19
29
  end
20
- end
30
+ end
@@ -8,13 +8,24 @@ module DTK::State
8
8
  write_point(value, checked_params_hash, timestamp)
9
9
  end
10
10
 
11
+ def get_required_tags(type, name, namespace, object_state, component_name, attribute_name, task_id)
12
+ {
13
+ type: type,
14
+ name: name,
15
+ namespace: namespace,
16
+ object_state: object_state,
17
+ component_name: component_name,
18
+ attribute_name: attribute_name,
19
+ task_id: task_id
20
+ }
21
+ end
22
+
11
23
  protected
12
24
 
13
25
  def required_params
14
- [:type, :name, :namespace, :object_state, :component_name, :attribute_name, :task_id]
26
+ %i[type name namespace object_state component_name attribute_name task_id]
15
27
  end
16
-
17
- end
28
+ end
18
29
  end
19
30
  end
20
- end
31
+ end
@@ -2,7 +2,8 @@ module DTK::State
2
2
  class Component::Attribute::Influxdb
3
3
  class SemanticType
4
4
  require_relative './client'
5
- attr_reader :name, :crd_content, :namespace, :client
5
+ require_relative('../../../../utils/log')
6
+ attr_reader :name, :crd_content, :namespace, :client, :expanded_semantictype_spec
6
7
  attr_accessor :content_to_write
7
8
 
8
9
  def initialize(name, namespace)
@@ -11,75 +12,103 @@ module DTK::State
11
12
  @client = Client.new
12
13
  @crd_content = get(name, namespace)
13
14
  @content_to_write = []
14
- end
15
-
16
- # no namespace because semantictype instances are going to be unique in cluster
17
- def get(name, namespace, opts = {})
18
- begin
19
- semantictype = ::DTK::CrdClient.get_kubeclient(opts).get_semantictype(name, namespace)
20
- semantictype.spec[:openAPIV3Schema]
21
- rescue => error
22
- fail "SemanticType attribute with name '#{name}' not found on the cluster!. Error: #{error}"
23
- end
15
+ @expanded_spec = ::DTK::CrdClient.get_kubeclient({}).get_semantictype(name, namespace).expandedSpec
16
+ @expanded_semantictype_spec = expand(crd_content.to_h[:properties], @expanded_spec)
24
17
  end
25
18
 
26
19
  def write_semantictype_inventory(inventory, component_id)
27
- begin
28
- get_influxdb_properties(inventory)
29
- content_to_write.each do |point|
30
- point[:tags].merge!({ component_id: component_id, attribute_name: @name })
31
- @client.write_point({
32
- name: point[:name],
33
- tags: point[:tags],
34
- fields: point[:fields],
35
- time: (Time.new.to_f * 1000).to_i
36
- })
37
- end
38
- "Inventory for attribute #{name} written to InfluxDB"
39
- rescue => error
40
- fail "#{name} inventory write failed. Error: #{error}"
20
+ get_influxdb_properties(inventory)
21
+ content_to_write.each do |point|
22
+ point[:tags].merge!({ component_id: component_id, attribute_name: @name })
23
+ @client.write_point({
24
+ name: point[:name],
25
+ tags: point[:tags],
26
+ fields: point[:fields],
27
+ time: (Time.new.to_f * 1000).to_i
28
+ })
41
29
  end
30
+ rescue => e
31
+ raise "#{name} inventory write failed. Error: #{e}"
42
32
  end
43
33
 
44
34
  def partial_write_update(component_and_attribute, path, field_name, field_value)
45
35
  parent, child = validate_parameter(path)
46
36
  component_id, attribute_name = component_and_attribute.split('/')
47
37
  # getting previous value for given parameters
48
- previous_value = { }
38
+ previous_value = {}
49
39
  begin
50
40
  flux_query = 'from(bucket:"' + @client.connection_parameters[:bucket] + '") |> range(start:-5) |> filter(fn:(r) => r._measurement == "' + attribute_name + "_" + child[:type] + '") |> filter(fn: (r) => r.parent == "' + parent[:name] + '") |> filter(fn: (r) => r.name == "' + child[:name] + '")|> last()'
51
41
  result = @client.query(query: flux_query)
52
42
  previous_value = result.values.map(&:records).flatten.map(&:values)
53
- rescue => error
54
- fail "Partial write could not be completed. Previous point for given parameters not found!"
43
+ rescue => e
44
+ raise "Partial write could not be completed. Previous point for given parameters not found!. Error: #{e}"
55
45
  end
56
46
  update_current(previous_value[0], get_path_to_object(path), field_name, field_value)
57
47
  end
58
48
 
59
49
  private
60
50
 
51
+ def expand(crd_content, expanded_spec)
52
+ if expanded_spec.nil?
53
+ expanded_spec = {}
54
+ populate_semantictypes(crd_content, expanded_spec)
55
+ ::DTK::CrdClient.get_kubeclient({}).merge_patch_semantictype(name, { expandedSpec: expanded_spec }, namespace)
56
+ end
57
+ expanded_spec
58
+ end
59
+
60
+ def get(name, namespace, opts = {})
61
+ semantictype = ::DTK::CrdClient.get_kubeclient(opts).get_semantictype(name, namespace)
62
+ semantictype.spec[:openAPIV3Schema]
63
+ rescue => e
64
+ raise "SemanticType attribute with name '#{name}' not found on the cluster!. Error: #{e.inspect}"
65
+ end
66
+
67
+ def populate_semantictypes(crd, expanded_spec, parent = nil)
68
+ crd.each_pair do |key, value|
69
+ basic, semantictype = value[:type].split(':')
70
+ if semantictype
71
+ expanded_spec[key] = get(basic.to_s, namespace).to_h[:properties]
72
+ crd[key] = get(basic.to_s, namespace).to_h[:properties]
73
+ temporary_expanded_spec = populate_semantictypes(crd[key], expanded_spec[key], basic)
74
+ end
75
+ if basic == 'array'
76
+ basic, semantictype = value[:items][:type].split(':')
77
+ if semantictype
78
+ expanded_spec[key] = get(basic.to_s, namespace).to_h[:properties]
79
+ crd[key] = get(basic.to_s, namespace).to_h[:properties]
80
+ temporary_expanded_spec = populate_semantictypes(crd[key], expanded_spec[key], basic)
81
+ end
82
+ end
83
+ temporary_expanded_spec ||= {}
84
+ unless temporary_expanded_spec.empty? || parent.nil?
85
+ ::DTK::CrdClient.get_kubeclient({}).merge_patch_semantictype(parent, { expandedSpec: { key => temporary_expanded_spec } }, namespace)
86
+ end
87
+ end
88
+ expanded_spec
89
+ end
90
+
61
91
  def update_current(previous_value, path, field_name, field_value)
62
- tags = { }
92
+ tags = {}
63
93
  previous_value.each_pair do |key, value|
64
- tags[key] = value if key[0..0] != "_" && key != "result" && key != "table"
94
+ tags[key] = value if key[0..0] != '_' && key != 'result' && key != 'table'
65
95
  end
66
96
  fields = Hash.new
67
97
  fields[field_name.to_sym] = field_value
68
98
  validate_fields(get_partial_definition(path), fields)
69
99
  @client.write_point({
70
- name: previous_value["_measurement"],
71
- tags: tags,
72
- fields: fields,
73
- time: (Time.new.to_f * 1000).to_i
74
- })
75
- "Partial write update successful"
100
+ name: previous_value['_measurement'],
101
+ tags: tags,
102
+ fields: fields,
103
+ time: (Time.new.to_f * 1000).to_i
104
+ })
76
105
  end
77
106
 
78
107
  def get_influxdb_properties(inventory, parent_type = [:top], parent_name = nil)
79
108
  content_to_write = []
80
109
  properties = { }
81
110
  inventory.each_pair do |key, value|
82
- if value.class.to_s == "Array"
111
+ if value.class.to_s == 'Array'
83
112
  inventory[key].each do |element|
84
113
  get_influxdb_properties(element, parent_type.push(key), inventory[:name])
85
114
  end
@@ -102,12 +131,10 @@ module DTK::State
102
131
  end
103
132
 
104
133
  def validate_request(partial_definition, request)
105
- begin
106
- validate_tags(partial_definition, request[:tags])
107
- validate_fields(partial_definition, request[:fields])
108
- rescue => error
109
- fail error
110
- end
134
+ validate_tags(partial_definition, request[:tags])
135
+ validate_fields(partial_definition, request[:fields])
136
+ rescue => e
137
+ raise e
111
138
  end
112
139
 
113
140
  def get_tags_and_fields(partial_definition, properties)
@@ -115,7 +142,7 @@ module DTK::State
115
142
  fields = { }
116
143
  properties.each_pair do |key, value|
117
144
  if partial_definition[key].nil?
118
- fail "Property '#{key}' not found in the definition of attribute"
145
+ raise "Property '#{key}' not found in the definition of attribute"
119
146
  else
120
147
  if partial_definition[key][:metric].nil? || partial_definition[key][:metric] == false
121
148
  tags[key] = value
@@ -131,14 +158,12 @@ module DTK::State
131
158
  end
132
159
 
133
160
  def validate_fields(partial_definition, fields)
134
-
135
161
  partial_definition.each_pair do |key, value|
136
162
  next if key == :required || value[:metric] == (false || nil)
137
-
138
163
  if fields[key].nil?
139
- fail "Field #{key} is missing. Validation of request failed!"
164
+ raise "Field #{key} is missing. Validation of request failed!"
140
165
  elsif value[:type].capitalize != fields[key].class.to_s
141
- fail "Defined type for SemanticType attribute property '#{key}' is #{value[:type].capitalize}, #{fields[key].class} provided"
166
+ raise "Defined type for SemanticType attribute property '#{key}' is #{value[:type].capitalize}, #{fields[key].class} provided"
142
167
  end
143
168
  end
144
169
  end
@@ -146,20 +171,19 @@ module DTK::State
146
171
  def validate_tags(partial_definition, tags)
147
172
  partial_definition.each_pair do |key, value|
148
173
  next if key == :required || value[:metric] == true
149
-
150
174
  if tags[key].nil?
151
175
  if value[:default].nil?
152
- fail "Property #{key} is missing. Validation of request failed!"
153
- else
176
+ raise "Property #{key} is missing. Validation of request failed!"
177
+ else
154
178
  tags[key] = value[:default]
155
179
  end
156
- else
180
+ else
157
181
  type = tags[key].class
158
- type = "Boolean" if type == TrueClass || type == FalseClass
182
+ type = 'Boolean' if type == TrueClass || type == FalseClass
159
183
  if value[:type].capitalize == type.to_s
160
184
  next
161
185
  else
162
- fail "Defined type for SemanticType attribute property '#{key}' is #{value[:type].capitalize}, #{type} provided"
186
+ raise "Defined type for SemanticType attribute property '#{key}' is #{value[:type].capitalize}, #{type} provided"
163
187
  end
164
188
  end
165
189
  end
@@ -167,24 +191,25 @@ module DTK::State
167
191
 
168
192
  def get_partial_definition(path)
169
193
  i = 0
170
- definition = { }
171
- semantictype_crd = crd_content[:properties]
194
+ definition = {}
195
+ semantictype_crd = crd_content.to_h[:properties]
196
+ expanded_spec = expanded_semantictype_spec.to_h
172
197
  while i < path.length
173
198
  if path[i].to_sym == :top
174
199
  semantictype_crd.each_pair do |key, value|
175
200
  if key == :required
176
201
  definition[key] = value
177
202
  else
178
- definition[key] = value if value[:type] != "array"
203
+ basic, semantictype = value[:type].split(':')
204
+ semantictype.nil? ? definition[key] = value : definition[key] = semantictype_crd[key] if basic != 'array'
179
205
  end
180
206
  end
181
207
  else
182
208
  definition = {}
183
- definition[:required] = semantictype_crd[path[i].to_sym][:items][:required]
184
- semantictype_crd[path[i].to_sym][:items][:properties].each_pair do |key, value|
185
- definition[key] = value if value[:type] != "array"
209
+ expanded_spec[path[i].to_sym].each_pair do |key, value|
210
+ definition[key] = value unless value[:type].nil?
186
211
  end
187
- semantictype_crd = semantictype_crd[path[i].to_sym][:items][:properties]
212
+ expanded_spec = expanded_spec[path[i].to_sym]
188
213
  end
189
214
  i+=1
190
215
  end
@@ -192,7 +217,7 @@ module DTK::State
192
217
  end
193
218
 
194
219
  def get_path_to_object(parameter)
195
- path = ["top"]
220
+ path = ['top']
196
221
  array = parameter.split('/')
197
222
  array.each do |element|
198
223
  path.push(element.split(':')[1])
@@ -202,19 +227,18 @@ module DTK::State
202
227
 
203
228
  def validate_parameter(parameter)
204
229
  array_of_parameters = []
205
- begin
206
- parameter.split('/').each_with_index do |param, index|
207
- name, type = param.split(':')
208
- fail unless name && type
209
- array_of_parameters.push({
210
- name: name,
211
- type: type
212
- })
213
- end
214
- array_of_parameters
215
- rescue => error
216
- fail "Could not resolve parameter '#{parameter}'. It should be in format: 'parent:type/child:type'"
230
+ parameter.split('/').each_with_index do |param, index|
231
+ name, type = param.split(':')
232
+ raise unless name && type
233
+
234
+ array_of_parameters.push({
235
+ name: name,
236
+ type: type
237
+ })
217
238
  end
239
+ array_of_parameters
240
+ rescue => e
241
+ raise "Could not resolve parameter '#{parameter}'. It should be in format: 'parent:type/child:type'"
218
242
  end
219
243
  end
220
244
  end
@@ -2,16 +2,19 @@ module DTK::State
2
2
  class ComponentDef
3
3
  require_relative 'component_def/attribute_type_info'
4
4
 
5
+ COMPONENT_DEF_CRD_VERSION = ENV["COMPONENT_DEF_CRD_VERSION"]
6
+
5
7
  attr_reader :name, :namespace, :executable_actions, :attribute_type_info
6
8
 
7
9
  def initialize(namespace, name, content)
8
10
  @name = name
9
11
  @namespace = namespace
10
- @executable_actions = content[:spec][:actions]
12
+ @executable_actions = content[:spec][:actions] || {}
11
13
  @attribute_type_info = AttributeTypeInfo.create_from_kube_hash(content[:spec][:attributes] || {})
12
14
  end
13
15
 
14
16
  def self.get(namespace, name, opts = {})
17
+ opts[:apiVersion] = COMPONENT_DEF_CRD_VERSION
15
18
  crd_component_def = ::DTK::CrdClient.get_kubeclient(opts).get_componentdef(name, namespace)
16
19
  ComponentDef.new(namespace, name, crd_component_def)
17
20
  end
@@ -19,6 +19,19 @@ module DTK::State
19
19
  end
20
20
  end
21
21
 
22
+ def to_hash
23
+ type_info = {}
24
+
25
+ type_info.merge!(name: @name) if @name
26
+ type_info.merge!(type: @type) if @type
27
+ type_info.merge!(required: @required) if @required
28
+ type_info.merge!(dynamic: @dynamic) if @dynamic
29
+ type_info.merge!(temporal: @temporal) if @temporal
30
+ type_info.merge!(encrypted: @encrypted) if @encrypted
31
+
32
+ type_info
33
+ end
34
+
22
35
  end
23
36
  end
24
37
  end
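
CRD API versions are now chosen per resource through environment variables (`COMPONENT_DEF_CRD_VERSION` above, read once when the class is loaded); when unset, `CrdClient` falls back to `DEFAULT_API_VERSION`. A sketch of the lookup together with the new `AttributeTypeInfo#to_hash`, using illustrative names and output:

```ruby
component_def = DTK::State::ComponentDef.get('default', 'my-component-def')

component_def.executable_actions                    # now defaults to {} when the CRD spec has no actions
component_def.attribute_type_info.map(&:to_hash)
# => [{ name: 'cpu_usage', type: 'string', temporal: true }, ...]   # only the flags that are set appear
```
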
@@ -1,17 +1,26 @@
1
1
  module DTK::State
2
2
  class CrdAssembly
3
- attr_reader :name, :namespace, :crd_content, :components, :references
3
+ ASSEMBLY_CRD_VERSION = ENV["ASSEMBLY_CRD_VERSION"]
4
+
5
+ include DTK::Utils::CrdHelper
6
+
7
+ attr_reader :name, :namespace, :crd_content, :references, :component_objs
8
+ attr_accessor :components
4
9
 
5
10
  def initialize(namespace, name, crd_content)
6
- @name = name
7
- @namespace = namespace
8
- @crd_content = crd_content
9
- @references = crd_content.references
10
- @components = crd_content[:spec][:components]
11
+ @name = name
12
+ @namespace = namespace
13
+ @api_version = crd_content.apiVersion
14
+ @kind = crd_content.kind
15
+ @metadata = crd_content.metadata
16
+ # @crd_content = crd_content
17
+ @references = crd_content.references
18
+ @components = crd_content[:spec][:components] || []
19
+ # @component_objs = Component.create_from_kube_array(@components, self)
11
20
  end
12
21
 
13
22
  def self.get(namespace, name, opts = {})
14
- # crd_component = ::DTK::CrdClient.instance.kubeclient.get_component(name, namespace)
23
+ opts[:apiVersion] = ASSEMBLY_CRD_VERSION
15
24
  crd_assembly = ::DTK::CrdClient.get_kubeclient(opts).get_assembly(name, namespace)
16
25
  CrdAssembly.new(namespace, name, crd_assembly)
17
26
  end
@@ -50,6 +59,51 @@ module DTK::State
50
59
  output
51
60
  end
52
61
 
62
+ # TODO: this is a temporary solution to avoid breaking backward compatibility; will change this soon
63
+ def self.get_with_influx_data(namespace, assembly_name, opts = {})
64
+ assembly = get(namespace, assembly_name, opts)
65
+
66
+ components_hash = {}
67
+ assembly.components.each do |assembly_component|
68
+ cmp_name = assembly_component.to_hash.keys.first
69
+ components_hash[cmp_name] = assembly_component[cmp_name].to_hash
70
+ end
71
+
72
+ component_objs = Component.create_from_kube_array(assembly.components, assembly)
73
+
74
+ component_objs.each do |component_obj|
75
+ component_name = component_obj.name
76
+ attr_type_info = component_obj.component_def.attribute_type_info
77
+ attr_type_info.each do |attr_info|
78
+ if attr_info.temporal
79
+ attribute_name = attr_info.name
80
+ influxdb = ::DTK::State::Component::Attribute::Influxdb.new(:attributes)
81
+ influxdb_attribute = influxdb.get(namespace, component_name, assembly_name, attribute_name, opts)
82
+ if valid_attribute = influxdb_attribute.first
83
+ value = valid_attribute['_value']
84
+ if components_hash[component_name][:attributes][attribute_name].is_a?(String)
85
+ components_hash[component_name][:attributes][attribute_name] = value
86
+ else
87
+ components_hash[component_name][:attributes][attribute_name][:value] = value
88
+ end
89
+ end
90
+ end
91
+ end
92
+ end
93
+ assembly.components = components_hash
94
+ assembly
95
+ end
96
+
97
+ def to_hash
98
+ {
99
+ apiVersion: @api_version,
100
+ kind: @kind,
101
+ metadata: filter_metadata(@metadata),
102
+ references: @references.to_hash,
103
+ spec: { components: @components.to_hash }
104
+ }
105
+ end
106
+
53
107
  private
54
108
 
55
109
  def self.check_for_missing_components(crd_components, requested_components)
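
A sketch of the assembly-level InfluxDB merge added above; as the TODO notes, `components` is rewritten in place to a plain hash for backward compatibility, with temporal attribute values refreshed from InfluxDB. Names are placeholders and the `to_hash` output is abbreviated:

```ruby
assembly = DTK::State::CrdAssembly.get_with_influx_data('default', 'my-assembly')

assembly.components   # component name => component content hash, temporal values replaced by the latest points
assembly.to_hash
# => { apiVersion: ..., kind: ..., metadata: ...,   # last-applied annotation filtered out via CrdHelper
#      references: ..., spec: { components: ... } }
```
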
@@ -0,0 +1,26 @@
1
+ module DTK::State
2
+ class Workflow
3
+ require_relative 'workflow_instance/attribute_type_info'
4
+
5
+ WORKFLOW_CRD_VERSION = ENV["WORKFLOW_CRD_VERSION"]
6
+
7
+ attr_reader :name, :namespace, :references, :assembly, :workflow, :attribute_type_info
8
+
9
+ def initialize(namespace, name, crd_content)
10
+ @name = name
11
+ @namespace = namespace
12
+
13
+ @references = crd_content.references
14
+ @assembly = @references.assembly
15
+ @workflow = crd_content.spec.workflow || {}
16
+
17
+ @attribute_type_info = ::DTK::State::WorkflowInstance::AttributeTypeInfo.create_from_kube_hash(crd_content.spec.attributes.to_h || {})
18
+ end
19
+
20
+ def self.get(namespace, name, opts = {})
21
+ opts[:apiVersion] = WORKFLOW_CRD_VERSION
22
+ workflow = ::DTK::CrdClient.get_kubeclient(opts).get_workflow(name, namespace)
23
+ Workflow.new(namespace, name, workflow)
24
+ end
25
+ end
26
+ end
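
The new `Workflow` class above wraps workflow templates the same way `WorkflowInstance` wraps instances, reusing its `AttributeTypeInfo`. A minimal sketch with placeholder names:

```ruby
workflow = DTK::State::Workflow.get('default', 'my-workflow')

workflow.workflow                              # raw workflow definition from the CRD spec (or {})
workflow.attribute_type_info.map(&:to_hash)    # declared attribute types, via WorkflowInstance::AttributeTypeInfo
```
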
@@ -2,23 +2,77 @@ module DTK::State
2
2
  class WorkflowInstance
3
3
  require_relative 'workflow_instance/attribute_type_info'
4
4
 
5
- attr_reader :name, :namespace, :assembly, :workflow_template, :attributes, :workflow, :attribute_type_info
5
+ WORKFLOW_INSTANCE_CRD_VERSION = ENV["WORKFLOW_INSTANCE_CRD_VERSION"]
6
+
7
+ include DTK::Utils::CrdHelper
8
+
9
+ attr_reader :name, :namespace, :assembly, :workflow_template, :attributes, :attribute_type_info, :status, :workflow
10
+ attr_accessor :workflow
6
11
 
7
12
  def initialize(namespace, name, crd_content)
8
13
  @name = name
9
14
  @namespace = namespace
10
- @assembly = crd_content.references.assembly
11
- @workflow_template = crd_content.references.workflow
12
- @attributes = crd_content.spec.attributes
13
- @workflow = crd_content.spec.workflow
14
- @attribute_type_info = AttributeTypeInfo.create_from_kube_hash(@attributes.to_h || {})
15
+
16
+ @api_version = crd_content.apiVersion
17
+ @kind = crd_content.kind
18
+ @metadata = crd_content.metadata
19
+
20
+ @references = crd_content.references
21
+ @assembly = @references.assembly
22
+ @workflow_template = @references.workflow
23
+
24
+ @attributes = crd_content.spec.attributes || {}
25
+ @status = crd_content.spec.status || {}
26
+ @workflow = crd_content.spec.workflow || {}
15
27
  end
16
28
 
17
29
  def self.get(namespace, name, opts = {})
30
+ opts[:apiVersion] = WORKFLOW_INSTANCE_CRD_VERSION
18
31
  workflow_instance = ::DTK::CrdClient.get_kubeclient(opts).get_workflow_instance(name, namespace)
19
32
  WorkflowInstance.new(namespace, name, workflow_instance)
20
33
  end
21
34
 
35
+ def self.get_with_influx_data(namespace, workflow_instance_name, opts = {})
36
+ workflow_instance = get(namespace, workflow_instance_name, opts)
37
+ return unless workflow_instance
38
+
39
+ workflow_instance.workflow[:subtasks].each do |subtask|
40
+ component_name, action_name = subtask[:component].split('.')
41
+ assembly_name = workflow_instance.assembly[:name]
42
+
43
+ executable_action = ::DTK::State::ExecutableAction.get(namespace, assembly_name, component_name, action_name, opts)
44
+ attr_type_info = executable_action.attribute_type_info
45
+
46
+ attr_type_info.each do |attr_info|
47
+ if attr_info.temporal
48
+ attribute_name = attr_info.name
49
+ influxdb = ::DTK::State::Component::Attribute::Influxdb.new(:attributes)
50
+ influxdb_attribute = influxdb.get(namespace, component_name, assembly_name, attribute_name, opts)
51
+ if valid_attribute = influxdb_attribute.first
52
+ value = valid_attribute['_value']
53
+ subtask[:attributes][attribute_name] = value
54
+ end
55
+ end
56
+ end
57
+ end
58
+
59
+ workflow_instance
60
+ end
61
+
62
+ def to_hash
63
+ {
64
+ apiVersion: @api_version,
65
+ kind: @kind,
66
+ metadata: filter_metadata(@metadata),
67
+ references: @references.to_hash,
68
+ spec: {
69
+ attributes: @attributes.to_hash,
70
+ status: @status.to_hash,
71
+ workflow: @workflow.to_hash
72
+ }
73
+ }
74
+ end
75
+
22
76
  def self.get_attributes(namespace, name, opts = {})
23
77
  workflow_instance = get(namespace, name, opts)
24
78
  workflow_instance.attributes.to_h
@@ -35,6 +89,7 @@ module DTK::State
35
89
  def self.update_action_level_result_attributes(namespace, name, attributes, action_id, opts = {})
36
90
  return "Dynamic attributes do not exist for action with id #{@action_id}, nothing to update" if attributes.nil? || attributes.empty?
37
91
  attributes.delete_if { |key, value| value.nil? || value.to_s.strip == '' }
92
+ opts[:apiVersion] = WORKFLOW_INSTANCE_CRD_VERSION
38
93
  workflow_instance = ::DTK::CrdClient.get_kubeclient(opts).get_workflow_instance(name, namespace)
39
94
  workflow = workflow_instance[:spec][:workflow]
40
95
 
@@ -63,6 +118,7 @@ module DTK::State
63
118
  end
64
119
 
65
120
  def self.update_action_status(namespace, name, parent_id, action_id, status, error_message = "", opts = {})
121
+ opts[:apiVersion] = WORKFLOW_INSTANCE_CRD_VERSION
66
122
  workflow_instance = ::DTK::CrdClient.get_kubeclient(opts).get_workflow_instance(name, namespace)
67
123
  steps = workflow_instance[:spec][:status][:steps]
68
124
  action_index_steps = steps.find_index { |action| action[:id].eql? action_id }
@@ -90,5 +146,38 @@ module DTK::State
90
146
  action
91
147
  end
92
148
 
149
+ def get_workflow_template(opts = {})
150
+ Workflow.get(@workflow_template.namespace, @workflow_template.name, opts)
151
+ end
152
+
153
+ def attribute_metadata
154
+ attributes = @attributes.to_hash
155
+ attr_type_info = get_workflow_template.attribute_type_info
156
+ attribute_metadata = {}
157
+
158
+ attr_type_info.each do |attr_info|
159
+ attr_info_hash = attr_info.to_hash
160
+ attribute_name = attr_info_hash[:name].to_sym
161
+
162
+ if attribute = attributes[attribute_name]
163
+ if attribute.is_a?(String)
164
+ attribute = { value: attribute }
165
+ end
166
+
167
+ attribute_metadata[attribute_name] = attr_info_hash.merge(attribute)
168
+ end
169
+ end
170
+
171
+ attribute_metadata
172
+ end
173
+
174
+ def attribute_values
175
+ attribute_with_values = {}
176
+ @attributes.each_pair do |name, content|
177
+ attribute_with_values.merge!(name => content[:value])
178
+ end
179
+ attribute_with_values
180
+ end
181
+
93
182
  end
94
183
  end
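
A sketch of the instance-level helpers added above: `get_with_influx_data` walks the subtasks and swaps temporal attribute values for the latest InfluxDB points, while `attribute_metadata` pulls type info from the referenced `Workflow` template. Names are placeholders and the output is abbreviated:

```ruby
instance = DTK::State::WorkflowInstance.get_with_influx_data('default', 'my-workflow-instance')

instance.attribute_values     # => { attribute_name => value, ... }
instance.attribute_metadata   # values merged with the template's attribute type info
instance.to_hash
# => { apiVersion: ..., kind: ..., metadata: ...,   # last-applied annotation filtered out
#      references: ..., spec: { attributes: ..., status: ..., workflow: ... } }
```
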
@@ -21,6 +21,19 @@ module DTK::State
21
21
  end
22
22
  end
23
23
 
24
+ def to_hash
25
+ type_info = {}
26
+
27
+ type_info.merge!(name: @name) if @name
28
+ type_info.merge!(type: @type) if @type
29
+ type_info.merge!(required: @required) if @required
30
+ type_info.merge!(dynamic: @dynamic) if @dynamic
31
+ type_info.merge!(temporal: @temporal) if @temporal
32
+ type_info.merge!(encrypted: @encrypted) if @encrypted
33
+
34
+ type_info
35
+ end
36
+
24
37
  end
25
38
  end
26
39
  end
@@ -0,0 +1,6 @@
1
+ module DTK
2
+ module Utils
3
+ require_relative 'utils/log'
4
+ require_relative 'utils/crd_helper'
5
+ end
6
+ end
@@ -0,0 +1,9 @@
1
+ module DTK::Utils
2
+ module CrdHelper
3
+ def filter_metadata(metadata)
4
+ metadata_hash = metadata.to_hash.dup
5
+ metadata_hash[:annotations].delete(:"kubectl.kubernetes.io/last-applied-configuration")
6
+ metadata_hash
7
+ end
8
+ end
9
+ end
@@ -0,0 +1,22 @@
1
+ module DTK::Utils
2
+ module Log
3
+ require 'logger'
4
+
5
+ def self.instance
6
+ @instance ||= Logger.new('/proc/1/fd/1', formatter: proc { |severity, datetime, progname, msg|
7
+ orange_color = "\x1b[33m"
8
+ white_color = "\x1b[37m"
9
+ red_color = "\x1b[31m"
10
+
11
+ date_format = datetime.strftime("%Y-%m-%d %H:%M:%S:%L")
12
+ if severity == "INFO"
13
+ "#{orange_color}[#{date_format}] - #{white_color}#{msg}\n"
14
+ elsif severity == "WARN"
15
+ "#{orange_color}[#{date_format}] [WARNING] - #{msg}\n"
16
+ elsif severity == "ERROR"
17
+ "#{red_color}[#{date_format}] [ERROR] - #{msg}\n"
18
+ end
19
+ })
20
+ end
21
+ end
22
+ end
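
The new `lib/utils` files bundle two small helpers: `CrdHelper#filter_metadata` strips the kubectl last-applied-configuration annotation before serializing a CRD, and `Log` writes colour-coded lines to `/proc/1/fd/1` (the container's stdout), so it is only meaningful inside a pod. A usage sketch:

```ruby
log = DTK::Utils::Log.instance   # memoized Logger pointed at the container's stdout

log.info('assembly state synced')        # timestamped, white message text
log.warn('temporal attribute missing')   # orange [WARNING] line
log.error('write to InfluxDB failed')    # red [ERROR] line
```
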
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |spec|
2
2
  spec.name = 'persistence-providers'
3
- spec.version = '0.0.3.7'
3
+ spec.version = '0.0.6'
4
4
  spec.author = 'Reactor8'
5
5
  spec.email = 'support@reactor8.com'
6
6
  spec.description = %q{Persistence providers plugin}
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: persistence-providers
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.3.7
4
+ version: 0.0.6
5
5
  platform: ruby
6
6
  authors:
7
7
  - Reactor8
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2020-05-14 00:00:00.000000000 Z
11
+ date: 2020-07-24 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: kubeclient
@@ -77,8 +77,12 @@ files:
77
77
  - lib/state/crd_assembly.rb
78
78
  - lib/state/executable_action.rb
79
79
  - lib/state/executable_action/attribute_type_info.rb
80
+ - lib/state/workflow.rb
80
81
  - lib/state/workflow_instance.rb
81
82
  - lib/state/workflow_instance/attribute_type_info.rb
83
+ - lib/utils.rb
84
+ - lib/utils/crd_helper.rb
85
+ - lib/utils/log.rb
82
86
  - persistence-providers.gemspec
83
87
  - test-destroy-influxdb.rb
84
88
  - test-influxdb.rb
@@ -102,7 +106,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
102
106
  - !ruby/object:Gem::Version
103
107
  version: '0'
104
108
  requirements: []
105
- rubygems_version: 3.0.6
109
+ rubygems_version: 3.0.3
106
110
  signing_key:
107
111
  specification_version: 4
108
112
  summary: Persistence providers plugin