openc3 5.20.0 → 6.0.0
- checksums.yaml +4 -4
- data/bin/openc3cli +12 -120
- data/data/config/command_modifiers.yaml +13 -1
- data/data/config/interface_modifiers.yaml +21 -4
- data/data/config/item_modifiers.yaml +1 -1
- data/data/config/microservice.yaml +15 -2
- data/data/config/param_item_modifiers.yaml +1 -1
- data/data/config/parameter_modifiers.yaml +1 -1
- data/data/config/table_manager.yaml +2 -2
- data/data/config/target.yaml +11 -0
- data/data/config/telemetry_modifiers.yaml +17 -1
- data/data/config/tool.yaml +12 -0
- data/data/config/widgets.yaml +13 -17
- data/lib/openc3/accessors/form_accessor.rb +4 -3
- data/lib/openc3/accessors/html_accessor.rb +3 -3
- data/lib/openc3/accessors/http_accessor.rb +13 -13
- data/lib/openc3/accessors/xml_accessor.rb +16 -4
- data/lib/openc3/api/target_api.rb +0 -30
- data/lib/openc3/config/config_parser.rb +6 -3
- data/lib/openc3/core_ext/array.rb +0 -16
- data/lib/openc3/core_ext.rb +0 -1
- data/lib/openc3/interfaces/file_interface.rb +198 -0
- data/lib/openc3/interfaces/http_client_interface.rb +71 -39
- data/lib/openc3/interfaces/http_server_interface.rb +0 -7
- data/lib/openc3/interfaces/interface.rb +2 -0
- data/lib/openc3/interfaces/mqtt_interface.rb +32 -15
- data/lib/openc3/interfaces/mqtt_stream_interface.rb +19 -4
- data/lib/openc3/interfaces/protocols/crc_protocol.rb +7 -0
- data/lib/openc3/interfaces/serial_interface.rb +1 -0
- data/lib/openc3/interfaces.rb +2 -4
- data/lib/openc3/microservices/multi_microservice.rb +3 -3
- data/lib/openc3/migrations/20241208080000_no_critical_cmd.rb +31 -0
- data/lib/openc3/migrations/20241208080001_no_trigger_group.rb +46 -0
- data/lib/openc3/models/interface_model.rb +9 -3
- data/lib/openc3/models/microservice_model.rb +8 -1
- data/lib/openc3/models/plugin_model.rb +6 -1
- data/lib/openc3/models/python_package_model.rb +6 -1
- data/lib/openc3/models/reaction_model.rb +14 -10
- data/lib/openc3/models/scope_model.rb +60 -42
- data/lib/openc3/models/target_model.rb +17 -1
- data/lib/openc3/models/timeline_model.rb +17 -5
- data/lib/openc3/models/tool_model.rb +15 -3
- data/lib/openc3/models/trigger_group_model.rb +6 -3
- data/lib/openc3/operators/microservice_operator.rb +8 -0
- data/lib/openc3/packets/commands.rb +17 -6
- data/lib/openc3/packets/limits.rb +0 -12
- data/lib/openc3/packets/packet.rb +1 -1
- data/lib/openc3/packets/packet_item.rb +30 -36
- data/lib/openc3/packets/structure_item.rb +2 -2
- data/lib/openc3/script/script.rb +0 -10
- data/lib/openc3/script/web_socket_api.rb +2 -2
- data/lib/openc3/streams/mqtt_stream.rb +41 -33
- data/lib/openc3/streams/serial_stream.rb +27 -27
- data/lib/openc3/streams/stream.rb +17 -17
- data/lib/openc3/streams/tcpip_client_stream.rb +1 -1
- data/lib/openc3/streams/tcpip_socket_stream.rb +19 -19
- data/lib/openc3/system/system.rb +1 -1
- data/lib/openc3/system.rb +2 -3
- data/lib/openc3/tools/table_manager/table.rb +2 -2
- data/lib/openc3/tools/table_manager/table_parser.rb +1 -1
- data/lib/openc3/top_level.rb +0 -5
- data/lib/openc3/topics/command_decom_topic.rb +0 -7
- data/lib/openc3/utilities/bucket_utilities.rb +1 -1
- data/lib/openc3/utilities/cli_generator.rb +0 -1
- data/lib/openc3/version.rb +6 -6
- data/templates/plugin/README.md +1 -1
- data/templates/target/targets/TARGET/lib/target.rb +1 -1
- data/templates/tool_angular/package.json +8 -8
- data/templates/tool_angular/src/app/app.component.html +4 -13
- data/templates/tool_angular/src/app/app.component.scss +5 -13
- data/templates/tool_angular/src/app/app.component.ts +5 -4
- data/templates/tool_angular/src/app/custom-overlay-container.ts +2 -2
- data/templates/tool_angular/src/app/openc3-api.d.ts +1 -1
- data/templates/tool_angular/src/main.single-spa.ts +1 -1
- data/templates/tool_react/package.json +1 -0
- data/templates/tool_react/src/root.component.js +1 -1
- data/templates/tool_svelte/package.json +11 -9
- data/templates/tool_svelte/rollup.config.js +2 -0
- data/templates/tool_svelte/src/App.svelte +2 -2
- data/templates/tool_vue/eslint.config.mjs +68 -0
- data/templates/tool_vue/jsconfig.json +1 -1
- data/templates/tool_vue/package.json +26 -43
- data/templates/tool_vue/src/App.vue +3 -5
- data/templates/tool_vue/src/main.js +12 -23
- data/templates/tool_vue/src/router.js +19 -18
- data/templates/tool_vue/src/tools/tool_name/tool_name.vue +2 -2
- data/templates/tool_vue/vite.config.js +52 -0
- data/templates/widget/package.json +19 -26
- data/templates/widget/src/Widget.vue +13 -15
- data/templates/widget/vite.config.js +26 -0
- metadata +10 -41
- data/lib/openc3/core_ext/hash.rb +0 -40
- data/lib/openc3/core_ext/httpclient.rb +0 -11
- data/lib/openc3/interfaces/linc_interface.rb +0 -480
- data/lib/openc3/interfaces/protocols/override_protocol.rb +0 -4
- data/lib/openc3/microservices/critical_cmd_microservice.rb +0 -74
- data/lib/openc3/microservices/reaction_microservice.rb +0 -607
- data/lib/openc3/microservices/timeline_microservice.rb +0 -398
- data/lib/openc3/microservices/trigger_group_microservice.rb +0 -698
- data/lib/openc3/migrations/20230615000000_autonomic.rb +0 -86
- data/lib/openc3/migrations/20240915000000_activity_uuid.rb +0 -28
- data/lib/openc3/migrations/20241016000000_scope_critical_cmd.rb +0 -24
- data/lib/openc3/system/system_config.rb +0 -413
- data/templates/tool_svelte/src/services/api.js +0 -92
- data/templates/tool_svelte/src/services/axios.js +0 -85
- data/templates/tool_svelte/src/services/cable.js +0 -65
- data/templates/tool_svelte/src/services/config-parser.js +0 -198
- data/templates/tool_svelte/src/services/openc3-api.js +0 -606
- data/templates/tool_vue/.eslintrc.js +0 -43
- data/templates/tool_vue/babel.config.json +0 -11
- data/templates/tool_vue/vue.config.js +0 -38
- data/templates/widget/.eslintrc.js +0 -43
- data/templates/widget/babel.config.json +0 -11
- data/templates/widget/vue.config.js +0 -28
- /data/templates/tool_vue/{.prettierrc.js → .prettierrc.cjs} +0 -0
- /data/templates/widget/{.prettierrc.js → .prettierrc.cjs} +0 -0
@@ -1,698 +0,0 @@
-# encoding: ascii-8bit
-
-# Copyright 2022 Ball Aerospace & Technologies Corp.
-# All Rights Reserved.
-#
-# This program is free software; you can modify and/or redistribute it
-# under the terms of the GNU Affero General Public License
-# as published by the Free Software Foundation; version 3 with
-# attribution addendums as found in the LICENSE.txt
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-
-# Modified by OpenC3, Inc.
-# All changes Copyright 2024, OpenC3, Inc.
-# All Rights Reserved
-#
-# This file may also be used under the terms of a commercial license
-# if purchased from OpenC3, Inc.
-
-require 'openc3/microservices/microservice'
-require 'openc3/models/trigger_model'
-require 'openc3/topics/autonomic_topic'
-require 'openc3/utilities/authentication'
-require 'openc3/packets/json_packet'
-
-require 'openc3/script'
-
-module OpenC3
-  class TriggerLoopError < TriggerError; end
-
-  # Stored in the TriggerGroupShare this should be a thread safe
-  # hash that triggers will be added, updated, and removed from
-  class PacketBase
-    def initialize(scope:)
-      @scope = scope
-      @mutex = Mutex.new
-      @packets = Hash.new
-    end
-
-    def packet(target:, packet:)
-      topic = "#{@scope}__DECOM__{#{target}}__#{packet}"
-      @mutex.synchronize do
-        return nil unless @packets[topic]
-        # Deep copy the packet so it doesn't change under us
-        return Marshal.load( Marshal.dump(@packets[topic][-1]) )
-      end
-    end
-
-    def previous_packet(target:, packet:)
-      topic = "#{@scope}__DECOM__{#{target}}__#{packet}"
-      @mutex.synchronize do
-        return nil unless @packets[topic] and @packets[topic].length == 2
-        # Deep copy the packet so it doesn't change under us
-        return Marshal.load( Marshal.dump(@packets[topic][0]) )
-      end
-    end
-
-    def add(topic:, packet:)
-      @mutex.synchronize do
-        @packets[topic] ||= []
-        if @packets[topic].length == 2
-          @packets[topic].shift
-        end
-        @packets[topic].push(packet)
-      end
-    end
-
-    def remove(topic:)
-      @mutex.synchronize do
-        @packets.delete(topic)
-      end
-    end
-  end
-
-  # Stored in the TriggerGroupShare this should be a thread safe
-  # hash that triggers will be added, updated, and removed from.
-  class TriggerBase
-    attr_reader :autonomic_topic, :triggers
-
-    def initialize(scope:)
-      @scope = scope
-      @autonomic_topic = "#{@scope}__openc3_autonomic".freeze
-      @triggers_mutex = Mutex.new
-      @triggers = Hash.new
-      @lookup_mutex = Mutex.new
-      @lookup = Hash.new
-    end
-
-    # Get triggers to evaluate based on the topic. If the
-    # topic is equal to the autonomic topic it will
-    # return only triggers with roots
-    def get_triggers(topic:)
-      if @autonomic_topic == topic
-        return triggers_with_roots()
-      else
-        return triggers_from(topic: topic)
-      end
-    end
-
-    # update trigger state after evaluated
-    # -1 (the value is considered an error used to disable the trigger)
-    # 0 (the value is considered as a false value)
-    # 1 (the value is considered as a true value)
-    def update_state(name:, value:)
-      @triggers_mutex.synchronize do
-        data = @triggers[name]
-        return unless data
-        trigger = TriggerModel.from_json(data, name: data['name'], scope: data['scope'])
-        if value == -1 && trigger.enabled
-          trigger.disable()
-        elsif value == 1 && trigger.state == false
-          trigger.state = true
-        elsif value == 0 && trigger.state == true
-          trigger.state = false
-        end
-        @triggers[name] = trigger.as_json(:allow_nan => true)
-      end
-    end
-
-    # returns a Hash of ALL enabled Trigger objects
-    def enabled_triggers
-      val = nil
-      @triggers_mutex.synchronize do
-        val = Marshal.load( Marshal.dump(@triggers) )
-      end
-      ret = Hash.new
-      val.each do | name, data |
-        trigger = TriggerModel.from_json(data, name: data['name'], scope: data['scope'])
-        ret[name] = trigger if trigger.enabled
-      end
-      return ret
-    end
-
-    # returns an Array of enabled Trigger objects that have roots to other triggers
-    def triggers_with_roots
-      val = nil
-      @triggers_mutex.synchronize do
-        val = Marshal.load( Marshal.dump(@triggers) )
-      end
-      ret = []
-      val.each do | _name, data |
-        trigger = TriggerModel.from_json(data, name: data['name'], scope: data['scope'])
-        ret << trigger if trigger.enabled && ! trigger.roots.empty?
-      end
-      return ret
-    end
-
-    # returns an Array of enabled Trigger objects that use a topic
-    def triggers_from(topic:)
-      val = nil
-      @lookup_mutex.synchronize do
-        val = Marshal.load( Marshal.dump(@lookup[topic]) )
-      end
-      return [] if val.nil?
-      ret = []
-      @triggers_mutex.synchronize do
-        val.each do | trigger_name |
-          data = Marshal.load( Marshal.dump(@triggers[trigger_name]) )
-          trigger = TriggerModel.from_json(data, name: data['name'], scope: data['scope'])
-          ret << trigger if trigger.enabled
-        end
-      end
-      return ret
-    end
-
-    # get all topics group is working with
-    def topics
-      @lookup_mutex.synchronize do
-        return Marshal.load( Marshal.dump(@lookup.keys()) )
-      end
-    end
-
-    # Rebuild the database lookup of all triggers in the group
-    def rebuild(triggers:)
-      @triggers_mutex.synchronize do
-        @triggers = Marshal.load( Marshal.dump(triggers) )
-      end
-      @lookup_mutex.synchronize do
-        @lookup = { @autonomic_topic => [] }
-        triggers.each do | _name, data |
-          trigger = TriggerModel.from_json(data, name: data['name'], scope: data['scope'])
-          trigger.generate_topics.each do | topic |
-            @lookup[topic] ||= []
-            @lookup[topic] << trigger.name
-          end
-        end
-      end
-    end
-
-    # Add a trigger from TriggerBase, must only be called once per trigger
-    def add(trigger:)
-      @triggers_mutex.synchronize do
-        @triggers[trigger['name']] = Marshal.load( Marshal.dump(trigger) )
-      end
-      trigger = TriggerModel.from_json(trigger, name: trigger['name'], scope: trigger['scope'])
-      @lookup_mutex.synchronize do
-        trigger.generate_topics.each do | topic |
-          @lookup[topic] ||= []
-          @lookup[topic] << trigger.name
-        end
-      end
-    end
-
-    # update a trigger from TriggerBase
-    def update(trigger:)
-      @triggers_mutex.synchronize do
-        model = TriggerModel.from_json(trigger, name: trigger['name'], scope: trigger['scope'])
-        model.update()
-        @triggers[trigger['name']] = model.as_json(:allow_nan => true)
-      end
-    end
-
-    # remove a trigger from TriggerBase
-    def remove(trigger:)
-      topics = []
-      @triggers_mutex.synchronize do
-        @triggers.delete(trigger['name'])
-        model = TriggerModel.from_json(trigger, name: trigger['name'], scope: trigger['scope'])
-        topics = model.generate_topics()
-        TriggerModel.delete(name: trigger['name'], group: trigger['group'], scope: trigger['scope'])
-      end
-      @lookup_mutex.synchronize do
-        topics.each do | topic |
-          unless @lookup[topic].nil?
-            @lookup[topic].delete(trigger['name'])
-            @lookup.delete(topic) if @lookup[topic].empty?
-          end
-        end
-      end
-    end
-  end
-
-  # Shared between the monitor thread and the manager thread to
-  # share the triggers. This should remain a thread
-  # safe implementation.
-  class TriggerGroupShare
-    attr_reader :trigger_base, :packet_base
-
-    def initialize(scope:)
-      @scope = scope
-      @trigger_base = TriggerBase.new(scope: scope)
-      @packet_base = PacketBase.new(scope: scope)
-    end
-  end
-
-  # The TriggerGroupWorker is a very simple thread pool worker. Once
-  # the trigger manager has pushed a packet to the queue one of
-  # these workers will evaluate the triggers for that packet.
-  class TriggerGroupWorker
-    TYPE = 'type'.freeze
-    ITEM_TARGET = 'target'.freeze
-    ITEM_PACKET = 'packet'.freeze
-    ITEM_TYPE = 'item'.freeze
-    ITEM_VALUE_TYPE = 'valueType'.freeze
-    FLOAT_TYPE = 'float'.freeze
-    STRING_TYPE = 'string'.freeze
-    REGEX_TYPE = 'regex'.freeze
-    LIMIT_TYPE = 'limit'.freeze
-    TRIGGER_TYPE = 'trigger'.freeze
-
-    attr_reader :name, :scope, :target, :packet, :group
-
-    def initialize(name:, logger:, scope:, group:, queue:, share:, ident:)
-      @name = name
-      @logger = logger
-      @scope = scope
-      @group = group
-      @queue = queue
-      @share = share
-      @ident = ident
-    end
-
-    def notify(name:, severity:, message:)
-      data = {}
-      # All AutonomicTopic notifications must have 'name' and 'updated_at' in the data
-      data['name'] = name
-      data['updated_at'] = Time.now.to_nsec_from_epoch
-      data['severity'] = severity
-      data['message'] = message
-      notification = {
-        'kind' => 'error',
-        'type' => 'trigger',
-        'data' => JSON.generate(data),
-      }
-      AutonomicTopic.write_notification(notification, scope: @scope)
-      @logger.public_send(severity.intern, message)
-    end
-
-    def run
-      @logger.info "TriggerGroupWorker-#{@ident} running"
-      loop do
-        topic = @queue.pop
-        break if topic.nil?
-        begin
-          evaluate_data_packet(topic: topic)
-        rescue StandardError => e
-          @logger.error "TriggerGroupWorker-#{@ident} failed to evaluate data packet from topic: #{topic}\n#{e.formatted}"
-        end
-      end
-      @logger.info "TriggerGroupWorker-#{@ident} exiting"
-    end
-
-    # Each packet will be evaluated to all triggers and use the result to send
-    # the results back to the topic to be used by the reaction microservice.
-    def evaluate_data_packet(topic:)
-      visited = Hash.new
-      @logger.debug "TriggerGroupWorker-#{@ident} topic: #{topic}"
-      @share.trigger_base.get_triggers(topic: topic).each do |trigger|
-        @logger.debug "TriggerGroupWorker-#{@ident} eval head: #{trigger}"
-        value = evaluate_trigger(
-          head: trigger,
-          trigger: trigger,
-          visited: visited,
-          triggers: @share.trigger_base.enabled_triggers
-        )
-        @logger.debug "TriggerGroupWorker-#{@ident} trigger: #{trigger} value: #{value}"
-        # value MUST be -1, 0, or 1
-        @share.trigger_base.update_state(name: trigger.name, value: value)
-      end
-    end
-
-    # extract the value outlined in the operand to get the packet item limit
-    # IF operand limit does not include _LOW or _HIGH this will match the
-    # COLOR and return COLOR_LOW || COLOR_HIGH
-    # operand item: GREEN_LOW == other operand limit: GREEN
-    def get_packet_limit(operand:, other:)
-      packet = @share.packet_base.packet(
-        target: operand[ITEM_TARGET],
-        packet: operand[ITEM_PACKET]
-      )
-      return nil if packet.nil?
-      _, limit = packet.read_with_limits_state(operand[ITEM_TYPE], operand[ITEM_VALUE_TYPE].intern)
-      # Convert limit symbol to string since we'll be comparing with strings
-      return limit.to_s
-    end
-
-    # extract the value outlined in the operand to get the packet item value
-    # IF raw in operand it will pull the raw value over the converted
-    def get_packet_value(operand:, previous:)
-      if previous
-        packet = @share.packet_base.previous_packet(
-          target: operand[ITEM_TARGET],
-          packet: operand[ITEM_PACKET]
-        )
-        # Previous might not be populated ... that's ok just return nil
-        return nil unless packet
-      else
-        packet = @share.packet_base.packet(
-          target: operand[ITEM_TARGET],
-          packet: operand[ITEM_PACKET]
-        )
-      end
-      # This shouldn't happen because the frontend provides valid items but good to check
-      # The raise is ultimately rescued inside evaluate_trigger when operand_value is called
-      if packet.nil?
-        raise "Packet #{operand[ITEM_TARGET]} #{operand[ITEM_PACKET]} not found"
-      end
-      value = packet.read(operand[ITEM_TYPE], operand[ITEM_VALUE_TYPE].intern)
-      if value.nil?
-        raise "Item #{operand[ITEM_TARGET]} #{operand[ITEM_PACKET]} #{operand[ITEM_TYPE]} not found"
-      end
-      value
-    end
-
-    # extract the value of the operand from the packet
-    def operand_value(operand:, other:, visited:, previous: false)
-      if operand[TYPE] == ITEM_TYPE && other && other[TYPE] == LIMIT_TYPE
-        return get_packet_limit(operand: operand, other: other)
-      elsif operand[TYPE] == ITEM_TYPE
-        return get_packet_value(operand: operand, previous: previous)
-      elsif operand[TYPE] == TRIGGER_TYPE
-        return visited["#{operand[TRIGGER_TYPE]}__R"] == 1
-      elsif operand[TYPE] == FLOAT_TYPE
-        return operand[operand[TYPE]].to_f
-      elsif operand[TYPE] == STRING_TYPE
-        return operand[operand[TYPE]].to_s
-      elsif operand[TYPE] == REGEX_TYPE
-        # This can potentially throw an exception on badly formatted Regexp
-        return Regexp.new(operand[operand[TYPE]])
-      elsif operand[TYPE] == LIMIT_TYPE
-        return operand[operand[TYPE]]
-      else
-        # This is a logic error ... should never get here
-        raise "Unknown operand type: #{operand}"
-      end
-    end
-
-    # the base evaluate method used by evaluate_trigger
-    # -1 (the value is considered an error used to disable the trigger)
-    # 0 (the value is considered as a false value)
-    # 1 (the value is considered as a true value)
-    #
-    def evaluate(name:, left:, operator:, right:)
-      @logger.debug "TriggerGroupWorker-#{@ident} evaluate: (#{left}(#{left.class}) #{operator} #{right}(#{right.class}))"
-      begin
-        case operator
-        when '>'
-          return left > right ? 1 : 0
-        when '<'
-          return left < right ? 1 : 0
-        when '>='
-          return left >= right ? 1 : 0
-        when '<='
-          return left <= right ? 1 : 0
-        when '!=', 'CHANGES'
-          return left != right ? 1 : 0
-        when '==', 'DOES NOT CHANGE'
-          return left == right ? 1 : 0
-        when '!~'
-          return left !~ right ? 1 : 0
-        when '=~'
-          return left =~ right ? 1 : 0
-        when 'AND'
-          return left && right ? 1 : 0
-        when 'OR'
-          return left || right ? 1 : 0
-        end
-      rescue ArgumentError
-        message = "invalid evaluate: (#{left} #{operator} #{right})"
-        notify(name: name, severity: 'error', message: message)
-        return -1
-      end
-    end
-
-    # This could be confusing... So this is a recursive method for the
-    # TriggerGroupWorkers to call. It will use the trigger name and append a
-    # __P for path or __R for result. The Path is a Hash that contains
-    # a key for each node traveled to get results. When the result has
-    # been found it will be stored in the result key __R in the visited Hash
-    # and eval_trigger will return a number.
-    # -1 (the value is considered an error used to disable the trigger)
-    # 0 (the value is considered as a false value)
-    # 1 (the value is considered as a true value)
-    #
-    # IF an operand is evaluated as nil it will log an error and return -1
-    # IF a loop is detected it will log an error and return -1
-    def evaluate_trigger(head:, trigger:, visited:, triggers:)
-      if visited["#{trigger.name}__R"]
-        return visited["#{trigger.name}__R"]
-      end
-      if visited["#{trigger.name}__P"].nil?
-        visited["#{trigger.name}__P"] = Hash.new
-      end
-      if visited["#{head.name}__P"][trigger.name]
-        # Not sure if this is possible as on create it validates that the dependents are already created
-        message = "loop detected from #{head.name} -> #{trigger.name} path: #{visited["#{head.name}__P"]}"
-        notify(name: trigger.name, severity: 'error', message: message)
-        return visited["#{trigger.name}__R"] = -1
-      end
-      trigger.roots.each do | root_trigger_name |
-        next if visited["#{root_trigger_name}__R"]
-        root_trigger = triggers[root_trigger_name]
-        if head.name == root_trigger.name
-          message = "loop detected from #{head.name} -> #{root_trigger_name} path: #{visited["#{head.name}__P"]}"
-          notify(name: trigger.name, severity: 'error', message: message)
-          return visited["#{trigger.name}__R"] = -1
-        end
-        result = evaluate_trigger(
-          head: head,
-          trigger: root_trigger,
-          visited: visited,
-          triggers: triggers
-        )
-        @logger.debug "TriggerGroupWorker-#{@ident} #{root_trigger.name} result: #{result}"
-        visited["#{root_trigger.name}__R"] = visited["#{head.name}__P"][root_trigger.name] = result
-      end
-      begin
-        left = operand_value(operand: trigger.left, other: trigger.right, visited: visited)
-        if trigger.operator.include?('CHANGE')
-          right = operand_value(operand: trigger.left, other: trigger.right, visited: visited, previous: true)
-        else
-          right = operand_value(operand: trigger.right, other: trigger.left, visited: visited)
-        end
-      rescue => e
-        # This will primarily happen when the user inputs a bad Regexp
-        notify(name: trigger.name, severity: 'error', message: e.message)
-        return visited["#{trigger.name}__R"] = -1
-      end
-      # Convert the standard '==' and '!=' into Ruby Regexp operators
-      operator = trigger.operator
-      if right and right.is_a? Regexp
-        operator = '=~' if operator == '=='
-        operator = '!~' if operator == '!='
-      end
-      if left.nil? || right.nil?
-        return visited["#{trigger.name}__R"] = 0
-      end
-      result = evaluate(name: trigger.name, left: left, operator: operator, right: right)
-      return visited["#{trigger.name}__R"] = result
-    end
-  end
-
-  # The trigger manager starts a thread pool and subscribes
-  # to the telemetry decom topic. It adds the "packet" to the thread pool queue
-  # and the thread will evaluate the "trigger".
-  class TriggerGroupManager
-    attr_reader :name, :scope, :share, :group, :topics, :thread_pool
-
-    def initialize(name:, logger:, scope:, group:, share:)
-      @name = name
-      @logger = logger
-      @scope = scope
-      @group = group
-      @share = share
-      @worker_count = 3
-      @queue = Queue.new
-      @read_topic = true
-      @topics = []
-      @thread_pool = nil
-      @cancel_thread = false
-    end
-
-    def generate_thread_pool()
-      thread_pool = []
-      @worker_count.times do | i |
-        worker = TriggerGroupWorker.new(
-          name: @name,
-          logger: @logger,
-          scope: @scope,
-          group: @group,
-          queue: @queue,
-          share: @share,
-          ident: i,
-        )
-        thread_pool << Thread.new { worker.run }
-      end
-      return thread_pool
-    end
-
-    def run
-      @logger.info "TriggerGroupManager running"
-      @thread_pool = generate_thread_pool()
-      loop do
-        begin
-          update_topics()
-        rescue StandardError => e
-          @logger.error "TriggerGroupManager failed to update topics.\n#{e.formatted}"
-        end
-        break if @cancel_thread
-        block_for_updates()
-        break if @cancel_thread
-      end
-      @logger.info "TriggerGroupManager exiting"
-    end
-
-    def update_topics
-      past_topics = @topics
-      @topics = @share.trigger_base.topics()
-      @logger.debug "TriggerGroupManager past_topics: #{past_topics} topics: #{@topics}"
-      (past_topics - @topics).each do | removed_topic |
-        @share.packet_base.remove(topic: removed_topic)
-      end
-    end
-
-    def block_for_updates
-      @read_topic = true
-      while @read_topic
-        begin
-          Topic.read_topics(@topics) do |topic, _msg_id, msg_hash, _redis|
-            @logger.debug "TriggerGroupManager block_for_updates: #{topic} #{msg_hash}"
-            if topic != @share.trigger_base.autonomic_topic
-              packet = JsonPacket.new(:TLM, msg_hash['target_name'], msg_hash['packet_name'], msg_hash['time'].to_i, false, msg_hash["json_data"])
-              @share.packet_base.add(topic: topic, packet: packet)
-            end
-            @queue << "#{topic}"
-          end
-        rescue StandardError => e
-          @logger.error "TriggerGroupManager failed to read topics #{@topics}\n#{e.formatted}"
-        end
-      end
-    end
-
-    def refresh
-      @read_topic = false
-    end
-
-    def shutdown
-      @read_topic = false
-      @cancel_thread = true
-      @worker_count.times do | _i |
-        @queue << nil
-      end
-    end
-  end
-
-  # The trigger microservice starts a manager then gets the activities
-  # from the sorted set in redis and updates the schedule for the
-  # manager. Timeline will then wait for an update on the timeline
-  # stream this will trigger an update again to the schedule.
-  class TriggerGroupMicroservice < Microservice
-    attr_reader :name, :scope, :share, :group, :manager, :manager_thread
-    # This lookup is mapping all the different trigger notifications
-    # which are primarily sent by notify in TriggerModel
-    TOPIC_LOOKUP = {
-      'error' => :no_op, # Sent by TriggerGroupWorker
-      'created' => :created_trigger_event,
-      'updated' => :rebuild_trigger_event,
-      'deleted' => :deleted_trigger_event,
-      'enabled' => :updated_trigger_event,
-      'disabled' => :updated_trigger_event,
-      'true' => :no_op, # Sent by TriggerGroupWorker
-      'false' => :no_op, # Sent by TriggerGroupWorker
-    }
-
-    def initialize(*args)
-      super(*args)
-      # The name is passed in via the trigger_group_model as "#{scope}__TRIGGER_GROUP__#{name}"
-      @group = @name.split('__')[2]
-      @share = TriggerGroupShare.new(scope: @scope)
-      @manager = TriggerGroupManager.new(name: @name, logger: @logger, scope: @scope, group: @group, share: @share)
-      @manager_thread = nil
-      @read_topic = true
-    end
-
-    def run
-      @logger.info "TriggerGroupMicroservice running"
-      @manager_thread = Thread.new { @manager.run }
-      loop do
-        triggers = TriggerModel.all(scope: @scope, group: @group)
-        @share.trigger_base.rebuild(triggers: triggers)
-        @manager.refresh() # Every time we do a full base update we refresh the manager
-        break if @cancel_thread
-        block_for_updates()
-        break if @cancel_thread
-      end
-      @logger.info "TriggerGroupMicroservice exiting"
-    end
-
-    def block_for_updates
-      @read_topic = true
-      while @read_topic && !@cancel_thread
-        begin
-          AutonomicTopic.read_topics(@topics) do |_topic, _msg_id, msg_hash, _redis|
-            break if @cancel_thread
-            @logger.debug "TriggerGroupMicroservice block_for_updates: #{msg_hash}"
-            # Process trigger notifications created by TriggerModel notify
-            if msg_hash['type'] == 'trigger'
-              data = JSON.parse(msg_hash['data'], :allow_nan => true, :create_additions => true)
-              public_send(TOPIC_LOOKUP[msg_hash['kind']], data)
-            end
-          end
-        rescue StandardError => e
-          @logger.error "TriggerGroupMicroservice failed to read topics #{@topics}\n#{e.formatted}"
-        end
-      end
-    end
-
-    def no_op(data)
-      @logger.debug "TriggerGroupMicroservice web socket event: #{data}"
-    end
-
-    # Add the trigger to the share.
-    def created_trigger_event(data)
-      @logger.debug "TriggerGroupMicroservice created_trigger_event #{data}"
-      if data['group'] == @group
-        @share.trigger_base.add(trigger: data)
-        @manager.refresh()
-      end
-    end
-
-    def updated_trigger_event(data)
-      @logger.debug "TriggerGroupMicroservice updated_trigger_event #{data}"
-      if data['group'] == @group
-        @share.trigger_base.update(trigger: data)
-      end
-    end
-
-    # When a trigger is updated it could change items which modifies topics and
-    # potentially adds or removes topics so refresh everything just to be safe
-    def rebuild_trigger_event(data)
-      @logger.debug "TriggerGroupMicroservice rebuild_trigger_event #{data}"
-      if data['group'] == @group
-        @share.trigger_base.update(trigger: data)
-        @read_topic = false
-      end
-    end
-
-    # Remove the trigger from the share.
-    def deleted_trigger_event(data)
-      @logger.debug "TriggerGroupMicroservice deleted_trigger_event #{data}"
-      if data['group'] == @group
-        @share.trigger_base.remove(trigger: data)
-        @manager.refresh()
-      end
-    end
-
-    def shutdown
-      @read_topic = false
-      @manager.shutdown()
-      super
-    end
-  end
-end
-
-OpenC3::TriggerGroupMicroservice.run if __FILE__ == $0