openc3 7.0.0.pre.rc2 → 7.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/openc3cli +13 -4
- data/bin/pipinstall +6 -7
- data/bin/pipuninstall +3 -5
- data/data/config/interface_modifiers.yaml +1 -1
- data/data/config/item_modifiers.yaml +18 -6
- data/data/config/telemetry.yaml +1 -1
- data/data/config/widgets.yaml +10 -0
- data/lib/openc3/accessors/json_accessor.rb +1 -1
- data/lib/openc3/api/cmd_api.rb +2 -0
- data/lib/openc3/api/settings_api.rb +2 -0
- data/lib/openc3/api/tlm_api.rb +3 -3
- data/lib/openc3/config/config_parser.rb +4 -4
- data/lib/openc3/conversions/conversion.rb +3 -3
- data/lib/openc3/core_ext/faraday.rb +4 -0
- data/lib/openc3/logs/log_writer.rb +24 -6
- data/lib/openc3/logs/packet_log_writer.rb +1 -4
- data/lib/openc3/logs/stream_log_pair.rb +11 -4
- data/lib/openc3/logs/text_log_writer.rb +1 -4
- data/lib/openc3/microservices/interface_microservice.rb +8 -2
- data/lib/openc3/microservices/log_microservice.rb +7 -2
- data/lib/openc3/microservices/microservice.rb +10 -4
- data/lib/openc3/microservices/queue_microservice.rb +9 -2
- data/lib/openc3/microservices/scope_cleanup_microservice.rb +116 -1
- data/lib/openc3/microservices/text_log_microservice.rb +4 -1
- data/lib/openc3/migrations/20241208080000_no_critical_cmd.rb +1 -1
- data/lib/openc3/migrations/20250402000000_periodic_only_default.rb +1 -1
- data/lib/openc3/migrations/20260203000000_remove_store_id.rb +28 -0
- data/lib/openc3/migrations/20260204000000_remove_decom_reducer.rb +29 -1
- data/lib/openc3/models/activity_model.rb +41 -9
- data/lib/openc3/models/auth_model.rb +54 -19
- data/lib/openc3/models/cvt_model.rb +2 -265
- data/lib/openc3/models/model.rb +16 -0
- data/lib/openc3/models/plugin_model.rb +18 -12
- data/lib/openc3/models/plugin_store_model.rb +1 -1
- data/lib/openc3/models/python_package_model.rb +2 -2
- data/lib/openc3/models/queue_model.rb +5 -3
- data/lib/openc3/models/script_engine_model.rb +1 -1
- data/lib/openc3/models/target_model.rb +75 -42
- data/lib/openc3/models/tool_config_model.rb +12 -0
- data/lib/openc3/models/tool_model.rb +18 -5
- data/lib/openc3/models/trigger_model.rb +1 -1
- data/lib/openc3/models/widget_model.rb +2 -9
- data/lib/openc3/operators/operator.rb +9 -7
- data/lib/openc3/packets/json_packet.rb +2 -0
- data/lib/openc3/packets/packet.rb +1 -0
- data/lib/openc3/packets/packet_config.rb +28 -12
- data/lib/openc3/script/calendar.rb +8 -0
- data/lib/openc3/script/script.rb +19 -0
- data/lib/openc3/script/storage.rb +6 -6
- data/lib/openc3/script/web_socket_api.rb +1 -1
- data/lib/openc3/system/system.rb +6 -6
- data/lib/openc3/tools/cmd_tlm_server/interface_thread.rb +0 -2
- data/lib/openc3/top_level.rb +15 -63
- data/lib/openc3/topics/command_topic.rb +1 -0
- data/lib/openc3/topics/limits_event_topic.rb +1 -1
- data/lib/openc3/utilities/authentication.rb +46 -7
- data/lib/openc3/utilities/authorization.rb +8 -1
- data/lib/openc3/utilities/aws_bucket.rb +2 -3
- data/lib/openc3/utilities/bucket_utilities.rb +3 -1
- data/lib/openc3/utilities/cli_generator.rb +7 -0
- data/lib/openc3/utilities/cmd_log.rb +1 -1
- data/lib/openc3/utilities/local_mode.rb +3 -0
- data/lib/openc3/utilities/process_manager.rb +1 -1
- data/lib/openc3/utilities/python_proxy.rb +11 -4
- data/lib/openc3/utilities/questdb_client.rb +764 -2
- data/lib/openc3/utilities/running_script.rb +25 -7
- data/lib/openc3/utilities/script.rb +452 -0
- data/lib/openc3/utilities/secrets.rb +1 -1
- data/lib/openc3/version.rb +5 -5
- data/templates/conversion/conversion.py +0 -8
- data/templates/conversion/conversion.rb +0 -11
- data/templates/tool_angular/package.json +2 -2
- data/templates/tool_react/package.json +1 -1
- data/templates/tool_svelte/package.json +1 -1
- data/templates/tool_vue/package.json +3 -3
- data/templates/widget/package.json +2 -2
- metadata +19 -19
- data/lib/openc3/migrations/20251022000000_remove_unique_id.rb +0 -23
- data/lib/openc3/migrations/20251213120000_reinstall_plugins.rb +0 -45
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
# encoding:
|
|
1
|
+
# encoding: utf-8
|
|
2
2
|
|
|
3
3
|
# Copyright 2026 OpenC3, Inc.
|
|
4
4
|
# All Rights Reserved.
|
|
@@ -11,15 +11,68 @@
|
|
|
11
11
|
# This file may also be used under the terms of a commercial license
|
|
12
12
|
# if purchased from OpenC3, Inc.
|
|
13
13
|
|
|
14
|
-
require 'json'
|
|
15
14
|
require 'base64'
|
|
16
15
|
require 'bigdecimal'
|
|
16
|
+
require 'concurrent'
|
|
17
|
+
require 'json'
|
|
18
|
+
require 'pg'
|
|
19
|
+
require 'set'
|
|
20
|
+
require 'time'
|
|
21
|
+
require 'openc3/models/target_model'
|
|
17
22
|
|
|
18
23
|
module OpenC3
|
|
19
24
|
# Utility class for QuestDB data encoding and decoding.
|
|
20
25
|
# This provides a common interface for serializing/deserializing COSMOS data types
|
|
21
26
|
# when writing to and reading from QuestDB.
|
|
22
27
|
class QuestDBClient
|
|
28
|
+
class QuestDBError < StandardError; end
|
|
29
|
+
|
|
30
|
+
# Thread-local PG connection storage using Concurrent::ThreadLocalVar.
|
|
31
|
+
# Each thread gets its own connection to avoid thread-safety issues with PG::Connection.
|
|
32
|
+
# Connections are automatically garbage collected when threads terminate.
|
|
33
|
+
@thread_conn = Concurrent::ThreadLocalVar.new(nil)
|
|
34
|
+
|
|
35
|
+
# Get or create a thread-local PG connection with type mapping configured.
|
|
36
|
+
# Returns the thread-local connection - callers should not close it.
|
|
37
|
+
def self.connection
|
|
38
|
+
conn = @thread_conn.value
|
|
39
|
+
if conn.nil? || conn.finished?
|
|
40
|
+
conn = PG::Connection.new(
|
|
41
|
+
host: ENV['OPENC3_TSDB_HOSTNAME'],
|
|
42
|
+
port: ENV['OPENC3_TSDB_QUERY_PORT'],
|
|
43
|
+
user: ENV['OPENC3_TSDB_USERNAME'],
|
|
44
|
+
password: ENV['OPENC3_TSDB_PASSWORD'],
|
|
45
|
+
dbname: 'qdb'
|
|
46
|
+
)
|
|
47
|
+
conn.type_map_for_results = PG::BasicTypeMapForResults.new(conn)
|
|
48
|
+
@thread_conn.value = conn
|
|
49
|
+
end
|
|
50
|
+
conn
|
|
51
|
+
end
|
|
52
|
+
|
|
53
|
+
# Reset the connection for the current thread. Used after errors.
|
|
54
|
+
def self.disconnect
|
|
55
|
+
conn = @thread_conn.value
|
|
56
|
+
if conn && !conn.finished?
|
|
57
|
+
conn.finish
|
|
58
|
+
end
|
|
59
|
+
@thread_conn.value = nil
|
|
60
|
+
end
|
|
61
|
+
|
|
62
|
+
# Health check - attempt to connect and immediately close.
|
|
63
|
+
# Returns true if successful, raises on failure.
|
|
64
|
+
def self.check_connection
|
|
65
|
+
conn = PG::Connection.new(
|
|
66
|
+
host: ENV['OPENC3_TSDB_HOSTNAME'],
|
|
67
|
+
port: ENV['OPENC3_TSDB_QUERY_PORT'],
|
|
68
|
+
user: ENV['OPENC3_TSDB_USERNAME'],
|
|
69
|
+
password: ENV['OPENC3_TSDB_PASSWORD'],
|
|
70
|
+
dbname: 'qdb'
|
|
71
|
+
)
|
|
72
|
+
conn.close
|
|
73
|
+
true
|
|
74
|
+
end
|
|
75
|
+
|
|
23
76
|
# Special timestamp items that are calculated from PACKET_TIMESECONDS/RECEIVED_TIMESECONDS columns
|
|
24
77
|
# rather than stored as separate columns. PACKET_TIMESECONDS and RECEIVED_TIMESECONDS are stored
|
|
25
78
|
# as timestamp_ns columns and need conversion to float seconds on read. The TIMEFORMATTED items
|
|
@@ -29,6 +82,10 @@ module OpenC3
|
|
|
29
82
|
'RECEIVED_TIMEFORMATTED' => { source: 'RECEIVED_TIMESECONDS', format: :formatted }
|
|
30
83
|
}.freeze
|
|
31
84
|
|
|
85
|
+
# Stored timestamp items that are stored as timestamp_ns columns and need
|
|
86
|
+
# conversion to float seconds on read. Distinguished from calculated items above.
|
|
87
|
+
STORED_TIMESTAMP_ITEMS = Set.new(['PACKET_TIMESECONDS', 'RECEIVED_TIMESECONDS']).freeze
|
|
88
|
+
|
|
32
89
|
# Sentinel values for storing float special values (inf, -inf, nan) in QuestDB.
|
|
33
90
|
# QuestDB stores these as NULL, so we use sentinel values near float max instead.
|
|
34
91
|
|
|
@@ -178,6 +235,115 @@ module OpenC3
|
|
|
178
235
|
item_name.to_s.gsub(/[?\.,'"\\\/:\)\(\+=\-\*\%~;!@#\$\^&]/, '_')
|
|
179
236
|
end
|
|
180
237
|
|
|
238
|
+
# Find an item definition within a packet definition by name.
|
|
239
|
+
#
|
|
240
|
+
# @param packet_def [Hash, nil] Packet definition from TargetModel.packet
|
|
241
|
+
# @param item_name [String] Item name to find
|
|
242
|
+
# @return [Hash, nil] Item definition hash or nil if not found
|
|
243
|
+
def self.find_item_def(packet_def, item_name)
|
|
244
|
+
return nil unless packet_def
|
|
245
|
+
packet_def['items']&.each do |item|
|
|
246
|
+
return item if item['name'] == item_name
|
|
247
|
+
end
|
|
248
|
+
nil
|
|
249
|
+
end
|
|
250
|
+
|
|
251
|
+
# Resolve the data_type and array_size for a QuestDB column based on the
|
|
252
|
+
# item definition and requested value type. This encapsulates the common
|
|
253
|
+
# logic for determining how to decode a value read from QuestDB.
|
|
254
|
+
#
|
|
255
|
+
# @param item_def [Hash, nil] Item definition from packet definition
|
|
256
|
+
# @param value_type [String] One of 'RAW', 'CONVERTED', 'FORMATTED'
|
|
257
|
+
# @return [Hash] { 'data_type' => String|nil, 'array_size' => Integer|nil }
|
|
258
|
+
def self.resolve_item_type(item_def, value_type)
|
|
259
|
+
case value_type
|
|
260
|
+
when 'FORMATTED', 'WITH_UNITS' # WITH_UNITS is deprecated
|
|
261
|
+
{ 'data_type' => 'STRING', 'array_size' => nil }
|
|
262
|
+
when 'CONVERTED'
|
|
263
|
+
if item_def
|
|
264
|
+
rc = item_def['read_conversion']
|
|
265
|
+
if rc && rc['converted_type']
|
|
266
|
+
{ 'data_type' => rc['converted_type'], 'array_size' => item_def['array_size'] }
|
|
267
|
+
elsif item_def['states']
|
|
268
|
+
{ 'data_type' => 'STRING', 'array_size' => nil }
|
|
269
|
+
else
|
|
270
|
+
{ 'data_type' => item_def['data_type'], 'array_size' => item_def['array_size'] }
|
|
271
|
+
end
|
|
272
|
+
else
|
|
273
|
+
{ 'data_type' => nil, 'array_size' => nil }
|
|
274
|
+
end
|
|
275
|
+
else # RAW or default
|
|
276
|
+
if item_def
|
|
277
|
+
{ 'data_type' => item_def['data_type'], 'array_size' => item_def['array_size'] }
|
|
278
|
+
else
|
|
279
|
+
{ 'data_type' => nil, 'array_size' => nil }
|
|
280
|
+
end
|
|
281
|
+
end
|
|
282
|
+
end
|
|
283
|
+
|
|
284
|
+
# Execute a SQL query with automatic retry on connection errors.
|
|
285
|
+
# Handles PG connection management and retries up to max_retries times.
|
|
286
|
+
#
|
|
287
|
+
# @param query [String] SQL query to execute
|
|
288
|
+
# @param params [Array] Query parameters for parameterized queries (uses exec_params)
|
|
289
|
+
# @param max_retries [Integer] Maximum number of retry attempts (default 5)
|
|
290
|
+
# @param label [String, nil] Optional label for log messages
|
|
291
|
+
# @return [PG::Result, nil] Query result
|
|
292
|
+
# @raise [RuntimeError] After exhausting retries
|
|
293
|
+
def self.query_with_retry(query, params: [], max_retries: 5, label: nil)
|
|
294
|
+
retry_count = 0
|
|
295
|
+
begin
|
|
296
|
+
conn = connection
|
|
297
|
+
if params.empty?
|
|
298
|
+
conn.exec(query)
|
|
299
|
+
else
|
|
300
|
+
conn.exec_params(query, params)
|
|
301
|
+
end
|
|
302
|
+
rescue IOError, PG::Error => e
|
|
303
|
+
retry_count += 1
|
|
304
|
+
if retry_count > (max_retries - 1)
|
|
305
|
+
raise QuestDBError.new("Error querying TSDB#{label ? " (#{label})" : ""}: #{e.message}")
|
|
306
|
+
end
|
|
307
|
+
Logger.warn("TSDB#{label ? " #{label}" : ""}: Retrying due to error: #{e.message}")
|
|
308
|
+
Logger.warn("TSDB#{label ? " #{label}" : ""}: Last query: #{query}")
|
|
309
|
+
disconnect
|
|
310
|
+
sleep 0.1
|
|
311
|
+
retry
|
|
312
|
+
end
|
|
313
|
+
end
|
|
314
|
+
|
|
315
|
+
# Convert a nanosecond integer timestamp to a UTC Time object.
|
|
316
|
+
#
|
|
317
|
+
# @param nsec [Integer] Nanoseconds since epoch
|
|
318
|
+
# @return [Time] UTC Time object
|
|
319
|
+
def self.nsec_to_utc_time(nsec)
|
|
320
|
+
return nil unless nsec
|
|
321
|
+
Time.at(nsec / 1_000_000_000, nsec % 1_000_000_000, :nsec, in: '+00:00')
|
|
322
|
+
end
|
|
323
|
+
|
|
324
|
+
# Coerce a value from QuestDB (which may be a Time, Float, Integer, String,
|
|
325
|
+
# or PG timestamp object) into a Ruby UTC Time.
|
|
326
|
+
#
|
|
327
|
+
# @param value [Object] Timestamp value in any supported format
|
|
328
|
+
# @return [Time, nil] UTC Time object or nil
|
|
329
|
+
def self.coerce_to_utc(value)
|
|
330
|
+
return nil unless value
|
|
331
|
+
case value
|
|
332
|
+
when Time
|
|
333
|
+
# PG driver returns Time objects with UTC values but in local timezone,
|
|
334
|
+
# so reconstruct as UTC from components rather than converting
|
|
335
|
+
pg_timestamp_to_utc(value)
|
|
336
|
+
when Float
|
|
337
|
+
Time.at(value).utc
|
|
338
|
+
when Integer
|
|
339
|
+
nsec_to_utc_time(value).utc
|
|
340
|
+
when String
|
|
341
|
+
Time.parse(value).utc
|
|
342
|
+
else
|
|
343
|
+
raise QuestDBError.new("Unsupported timestamp value #{value} with type: #{value.class}")
|
|
344
|
+
end
|
|
345
|
+
end
|
|
346
|
+
|
|
181
347
|
# Convert a PG timestamp to UTC.
|
|
182
348
|
# PG driver returns timestamps as naive Time objects that need UTC treatment.
|
|
183
349
|
# QuestDB stores timestamps in UTC, but the PG driver applies local timezone.
|
|
@@ -206,5 +372,601 @@ module OpenC3
|
|
|
206
372
|
nil
|
|
207
373
|
end
|
|
208
374
|
end
|
|
375
|
+
|
|
376
|
+
# Return the QuestDB column suffix for a given value type.
|
|
377
|
+
#
|
|
378
|
+
# @param value_type [String] One of 'RAW', 'CONVERTED', 'FORMATTED'
|
|
379
|
+
# @return [String] Column suffix (e.g., '__C', '__F', or '')
|
|
380
|
+
def self.column_suffix_for_value_type(value_type)
|
|
381
|
+
case value_type
|
|
382
|
+
when 'FORMATTED', 'WITH_UNITS' # WITH_UNITS is deprecated
|
|
383
|
+
'__F'
|
|
384
|
+
when 'CONVERTED'
|
|
385
|
+
'__C'
|
|
386
|
+
else
|
|
387
|
+
''
|
|
388
|
+
end
|
|
389
|
+
end
|
|
390
|
+
|
|
391
|
+
# Determine the value type from a QuestDB column name's suffix.
|
|
392
|
+
#
|
|
393
|
+
# @param column_name [String] Column name possibly ending in __C, __F, __L
|
|
394
|
+
# @return [String] One of 'FORMATTED', 'CONVERTED', 'RAW'
|
|
395
|
+
def self.value_type_for_column_suffix(column_name)
|
|
396
|
+
if column_name.end_with?('__F')
|
|
397
|
+
'FORMATTED'
|
|
398
|
+
elsif column_name.end_with?('__C')
|
|
399
|
+
'CONVERTED'
|
|
400
|
+
else
|
|
401
|
+
'RAW'
|
|
402
|
+
end
|
|
403
|
+
end
|
|
404
|
+
|
|
405
|
+
# Build a SQL WHERE clause for PACKET_TIMESECONDS range filtering.
|
|
406
|
+
#
|
|
407
|
+
# @param start_time [Integer, String] Start timestamp (nanoseconds)
|
|
408
|
+
# @param end_time [Integer, String, nil] End timestamp (nanoseconds), or nil for open-ended
|
|
409
|
+
# @param prefix [String] Table alias prefix (e.g., 'T0.') — default ''
|
|
410
|
+
# @return [String] SQL WHERE clause fragment (includes leading space)
|
|
411
|
+
def self.time_where_clause(start_time, end_time, prefix: '')
|
|
412
|
+
where = " WHERE #{prefix}PACKET_TIMESECONDS >= #{start_time}"
|
|
413
|
+
where += " AND #{prefix}PACKET_TIMESECONDS < #{end_time}" if end_time
|
|
414
|
+
where
|
|
415
|
+
end
|
|
416
|
+
|
|
417
|
+
# Fetch a packet definition from TargetModel, returning nil if not found.
|
|
418
|
+
#
|
|
419
|
+
# @param target_name [String] Target name
|
|
420
|
+
# @param packet_name [String] Packet name
|
|
421
|
+
# @param type [Symbol] :CMD or :TLM (default :TLM)
|
|
422
|
+
# @param scope [String] Scope name
|
|
423
|
+
# @return [Hash, nil] Packet definition or nil
|
|
424
|
+
def self.fetch_packet_def(target_name, packet_name, type: :TLM, scope: "DEFAULT")
|
|
425
|
+
TargetModel.packet(target_name, packet_name, type: type, scope: scope)
|
|
426
|
+
rescue RuntimeError
|
|
427
|
+
nil
|
|
428
|
+
end
|
|
429
|
+
|
|
430
|
+
# Build a hash mapping sanitized column names to item definitions.
|
|
431
|
+
# Used for type-aware decoding of QuestDB SELECT * results.
|
|
432
|
+
#
|
|
433
|
+
# @param packet_def [Hash, nil] Packet definition from TargetModel.packet
|
|
434
|
+
# @return [Hash] { sanitized_column_name => item_def_hash }
|
|
435
|
+
def self.build_item_defs_map(packet_def)
|
|
436
|
+
map = {}
|
|
437
|
+
return map unless packet_def
|
|
438
|
+
packet_def['items']&.each do |item|
|
|
439
|
+
map[sanitize_column_name(item['name'])] = item
|
|
440
|
+
end
|
|
441
|
+
map
|
|
442
|
+
end
|
|
443
|
+
|
|
444
|
+
# Build aggregation SELECT columns (min/max/avg/stddev) for a single item.
|
|
445
|
+
# Returns the SELECT fragments and a column_mapping hash.
|
|
446
|
+
#
|
|
447
|
+
# @param safe_item_name [String] Sanitized column name
|
|
448
|
+
# @param value_type [Symbol] :RAW or :CONVERTED
|
|
449
|
+
# @param item_name [String, nil] Original (unsanitized) item name for mapping values.
|
|
450
|
+
# Defaults to safe_item_name if not provided.
|
|
451
|
+
# @return [Array<String>, Hash] Two-element array: [select_fragments, column_mapping]
|
|
452
|
+
# column_mapping maps result column alias to [item_name, reduced_type, value_type]
|
|
453
|
+
def self.build_aggregation_selects(safe_item_name, value_type, item_name: nil)
|
|
454
|
+
item_name ||= safe_item_name
|
|
455
|
+
selects = []
|
|
456
|
+
mapping = {}
|
|
457
|
+
case value_type
|
|
458
|
+
when :RAW
|
|
459
|
+
col = safe_item_name
|
|
460
|
+
{ 'N' => :MIN, 'X' => :MAX, 'A' => :AVG, 'S' => :STDDEV }.each do |suffix, reduced_type|
|
|
461
|
+
alias_name = "#{safe_item_name}__#{suffix}"
|
|
462
|
+
selects << "#{reduced_type.to_s.downcase}(\"#{col}\") as \"#{alias_name}\""
|
|
463
|
+
mapping[alias_name] = [item_name, reduced_type, :RAW]
|
|
464
|
+
end
|
|
465
|
+
when :CONVERTED
|
|
466
|
+
col = "#{safe_item_name}__C"
|
|
467
|
+
{ 'CN' => :MIN, 'CX' => :MAX, 'CA' => :AVG, 'CS' => :STDDEV }.each do |suffix, reduced_type|
|
|
468
|
+
alias_name = "#{safe_item_name}__#{suffix}"
|
|
469
|
+
selects << "#{reduced_type.to_s.downcase}(\"#{col}\") as \"#{alias_name}\""
|
|
470
|
+
mapping[alias_name] = [item_name, reduced_type, :CONVERTED]
|
|
471
|
+
end
|
|
472
|
+
else
|
|
473
|
+
# No aggregation for FORMATTED type since it is a string
|
|
474
|
+
raise QuestDBError.new("Unsupported value type for aggregation: #{value_type}")
|
|
475
|
+
end
|
|
476
|
+
[selects, mapping]
|
|
477
|
+
end
|
|
478
|
+
|
|
479
|
+
# Build aggregation SELECT columns for all numeric items in a packet definition.
|
|
480
|
+
# Filters out STRING, BLOCK, and DERIVED items since they can't be aggregated.
|
|
481
|
+
#
|
|
482
|
+
# @param packet_def [Hash, nil] Packet definition from TargetModel.packet
|
|
483
|
+
# @param value_type [Symbol] :RAW or :CONVERTED
|
|
484
|
+
# @return [Array<String>, Boolean] Two-element array: [select_fragments, has_numeric_items]
|
|
485
|
+
# select_fragments includes TIMESTAMP_SELECT as the first element.
|
|
486
|
+
def self.build_packet_reduced_selects(packet_def, value_type)
|
|
487
|
+
selects = [TIMESTAMP_SELECT]
|
|
488
|
+
has_items = false
|
|
489
|
+
return [selects, false] unless packet_def && packet_def['items']
|
|
490
|
+
|
|
491
|
+
packet_def['items'].each do |item|
|
|
492
|
+
data_type = item['data_type']
|
|
493
|
+
next if data_type.nil?
|
|
494
|
+
next if ['STRING', 'BLOCK', 'DERIVED'].include?(data_type)
|
|
495
|
+
next unless value_type == :RAW || value_type == :CONVERTED
|
|
496
|
+
|
|
497
|
+
safe_name = sanitize_column_name(item['name'])
|
|
498
|
+
agg_selects, _mapping = build_aggregation_selects(safe_name, value_type)
|
|
499
|
+
selects.concat(agg_selects)
|
|
500
|
+
has_items = true
|
|
501
|
+
end
|
|
502
|
+
|
|
503
|
+
[selects, has_items]
|
|
504
|
+
end
|
|
505
|
+
|
|
506
|
+
# Add TIMESECONDS and TIMEFORMATTED entries to a hash from a nanosecond timestamp.
|
|
507
|
+
# Used when building packet entries from CAST(timestamp AS LONG) columns.
|
|
508
|
+
#
|
|
509
|
+
# @param entry [Hash] Entry hash to populate
|
|
510
|
+
# @param timestamp_ns [Integer] Nanoseconds since epoch
|
|
511
|
+
# @param prefix [String] 'PACKET' or 'RECEIVED'
|
|
512
|
+
def self.add_timestamp_entries!(entry, timestamp_ns, prefix)
|
|
513
|
+
return unless timestamp_ns
|
|
514
|
+
utc_time = nsec_to_utc_time(timestamp_ns)
|
|
515
|
+
entry["#{prefix}_TIMESECONDS"] = format_timestamp(utc_time, :seconds)
|
|
516
|
+
entry["#{prefix}_TIMEFORMATTED"] = format_timestamp(utc_time, :formatted)
|
|
517
|
+
end
|
|
518
|
+
|
|
519
|
+
# SQL: nanosecond-precision packet timestamp for explicit SELECT lists.
|
|
520
|
+
# PG wire protocol truncates timestamp_ns to microseconds; CAST AS LONG preserves full precision.
|
|
521
|
+
TIMESTAMP_SELECT = 'CAST(PACKET_TIMESECONDS AS LONG) as PACKET_TIMESECONDS'
|
|
522
|
+
|
|
523
|
+
# SQL: nanosecond-precision timestamps for SELECT * queries (different aliases avoid column name collision).
|
|
524
|
+
TIMESTAMP_EXTRAS = 'CAST(PACKET_TIMESECONDS AS LONG) as "__pkt_time_ns", CAST(RECEIVED_TIMESECONDS AS LONG) as "__rx_time_ns"'
|
|
525
|
+
|
|
526
|
+
# Returns the SAMPLE BY interval string for a given stream_mode symbol.
|
|
527
|
+
#
|
|
528
|
+
# @param stream_mode [Symbol] :REDUCED_MINUTE, :REDUCED_HOUR, or :REDUCED_DAY
|
|
529
|
+
# @return [String] QuestDB SAMPLE BY interval string
|
|
530
|
+
def self.sample_interval_for(stream_mode)
|
|
531
|
+
case stream_mode
|
|
532
|
+
when :REDUCED_MINUTE then '1m'
|
|
533
|
+
when :REDUCED_HOUR then '1h'
|
|
534
|
+
when :REDUCED_DAY then '1d'
|
|
535
|
+
else '1m'
|
|
536
|
+
end
|
|
537
|
+
end
|
|
538
|
+
|
|
539
|
+
# Returns true if the given TSDB table exists and has at least one row in the time range.
|
|
540
|
+
#
|
|
541
|
+
# @param table_name [String] Sanitized table name
|
|
542
|
+
# @param start_time [Integer] Nanosecond start time
|
|
543
|
+
# @param end_time [Integer, nil] Nanosecond end time
|
|
544
|
+
# @return [Boolean]
|
|
545
|
+
def self.table_has_data?(table_name, start_time, end_time)
|
|
546
|
+
query = "SELECT 1 FROM #{table_name}"
|
|
547
|
+
query += time_where_clause(start_time, end_time)
|
|
548
|
+
query += " LIMIT 1"
|
|
549
|
+
result = query_with_retry(query, max_retries: 1, label: "table_has_data")
|
|
550
|
+
result && result.ntuples > 0
|
|
551
|
+
rescue RuntimeError
|
|
552
|
+
false
|
|
553
|
+
end
|
|
554
|
+
|
|
555
|
+
# Execute a paginated TSDB query, yielding each non-empty PG::Result page.
|
|
556
|
+
# Handles LIMIT pagination and retry on error.
|
|
557
|
+
#
|
|
558
|
+
# @param query [String] Base SQL query (without LIMIT clause)
|
|
559
|
+
# @param page_size [Integer] Number of rows per page
|
|
560
|
+
# @param label [String] Label for log messages
|
|
561
|
+
# @yield [PG::Result] Each page of results
|
|
562
|
+
def self.paginate_query(query, page_size, label:)
|
|
563
|
+
min = 0
|
|
564
|
+
max = page_size
|
|
565
|
+
loop do
|
|
566
|
+
query_offset = "#{query} LIMIT #{min}, #{max}"
|
|
567
|
+
Logger.debug("QuestDB #{label}: #{query_offset}")
|
|
568
|
+
result = query_with_retry(query_offset, label: label)
|
|
569
|
+
min += page_size
|
|
570
|
+
max += page_size
|
|
571
|
+
if result.nil? or result.ntuples == 0
|
|
572
|
+
return
|
|
573
|
+
else
|
|
574
|
+
yield result
|
|
575
|
+
end
|
|
576
|
+
end
|
|
577
|
+
end
|
|
578
|
+
|
|
579
|
+
# Build a SELECT query for specific item columns from a single table.
|
|
580
|
+
#
|
|
581
|
+
# @param table_name [String] Sanitized QuestDB table name
|
|
582
|
+
# @param column_names [Array<String>] Quoted column expressions (e.g., '"TEMP1__C"')
|
|
583
|
+
# @param start_time [Integer] Start timestamp in nanoseconds
|
|
584
|
+
# @param end_time [Integer, nil] End timestamp in nanoseconds
|
|
585
|
+
# @param include_received_ts [Boolean] Whether to include RECEIVED_TIMESECONDS
|
|
586
|
+
# @return [String] Complete SQL query (without LIMIT clause)
|
|
587
|
+
def self.build_item_columns_query(table_name, column_names, start_time, end_time, include_received_ts: false)
|
|
588
|
+
names = column_names.dup
|
|
589
|
+
names << TIMESTAMP_SELECT
|
|
590
|
+
names << "RECEIVED_TIMESECONDS" if include_received_ts
|
|
591
|
+
names << "COSMOS_EXTRA"
|
|
592
|
+
query = "SELECT #{names.join(', ')} FROM #{table_name}"
|
|
593
|
+
query += time_where_clause(start_time, end_time)
|
|
594
|
+
query
|
|
595
|
+
end
|
|
596
|
+
|
|
597
|
+
# Build a SELECT * query for full packet data from a single table.
|
|
598
|
+
#
|
|
599
|
+
# @param table_name [String] Sanitized QuestDB table name
|
|
600
|
+
# @param start_time [Integer] Start timestamp in nanoseconds
|
|
601
|
+
# @param end_time [Integer, nil] End timestamp in nanoseconds
|
|
602
|
+
# @return [String] Complete SQL query (without LIMIT clause)
|
|
603
|
+
def self.build_packet_query(table_name, start_time, end_time)
|
|
604
|
+
query = "SELECT *, #{TIMESTAMP_EXTRAS} FROM \"#{table_name}\""
|
|
605
|
+
query += time_where_clause(start_time, end_time)
|
|
606
|
+
query
|
|
607
|
+
end
|
|
608
|
+
|
|
609
|
+
# Build a SAMPLE BY aggregation query for reduced data.
|
|
610
|
+
#
|
|
611
|
+
# @param table_name [String] Sanitized QuestDB table name
|
|
612
|
+
# @param select_columns [Array<String>] SELECT column expressions including aggregations
|
|
613
|
+
# @param start_time [Integer] Start timestamp in nanoseconds
|
|
614
|
+
# @param end_time [Integer, nil] End timestamp in nanoseconds
|
|
615
|
+
# @param sample_interval [String] QuestDB SAMPLE BY interval ('1m', '1h', '1d')
|
|
616
|
+
# @return [String] Complete SQL query (without LIMIT clause)
|
|
617
|
+
def self.build_reduced_query(table_name, select_columns, start_time, end_time, sample_interval)
|
|
618
|
+
query = "SELECT #{select_columns.join(', ')} FROM \"#{table_name}\""
|
|
619
|
+
query += time_where_clause(start_time, end_time)
|
|
620
|
+
query += " SAMPLE BY #{sample_interval}"
|
|
621
|
+
query += " ALIGN TO CALENDAR"
|
|
622
|
+
query += " ORDER BY PACKET_TIMESECONDS"
|
|
623
|
+
query
|
|
624
|
+
end
|
|
625
|
+
|
|
626
|
+
# Decode a single row from a per-table item columns query into an entry hash.
|
|
627
|
+
# Handles stored timestamps, calculated timestamps, and regular value decoding.
|
|
628
|
+
#
|
|
629
|
+
# @param row [PG::Result row] Single row (iterable as [col_name, value] pairs)
|
|
630
|
+
# @param sql_to_local [Array<Integer>] Mapping from SQL column index to meta position
|
|
631
|
+
# @param meta [Hash] Per-table metadata with keys:
|
|
632
|
+
# :item_keys [Array<String>] - ordered list of item key identifiers
|
|
633
|
+
# :item_types [Array<Hash>] - type info per position ({ 'data_type' =>, 'array_size' => })
|
|
634
|
+
# :stored_timestamp_item_keys [Hash] - { item_key => { column: col_name } }
|
|
635
|
+
# :calculated_positions [Hash] - { local_idx => { source: col_name, format: :seconds/:formatted } }
|
|
636
|
+
# @return [Hash] Entry hash with __type, item_key => value, __time, COSMOS_EXTRA
|
|
637
|
+
def self.decode_item_row(row, sql_to_local, meta)
|
|
638
|
+
num_sql_item_cols = sql_to_local.length
|
|
639
|
+
|
|
640
|
+
entry = { "__type" => "ITEMS" }
|
|
641
|
+
timestamp_values = {}
|
|
642
|
+
time_ns = nil
|
|
643
|
+
cosmos_extra = nil
|
|
644
|
+
|
|
645
|
+
values = Array.new(meta[:item_keys].length)
|
|
646
|
+
|
|
647
|
+
row.each_with_index do |tuple, sql_index|
|
|
648
|
+
col_name = tuple[0]
|
|
649
|
+
value = tuple[1]
|
|
650
|
+
|
|
651
|
+
# Fixed columns come after item columns
|
|
652
|
+
if sql_index >= num_sql_item_cols
|
|
653
|
+
case col_name
|
|
654
|
+
when 'PACKET_TIMESECONDS'
|
|
655
|
+
time_ns = value.to_i
|
|
656
|
+
timestamp_values['PACKET_TIMESECONDS'] = nsec_to_utc_time(time_ns)
|
|
657
|
+
when 'RECEIVED_TIMESECONDS'
|
|
658
|
+
timestamp_values['RECEIVED_TIMESECONDS'] = value if value
|
|
659
|
+
when 'COSMOS_EXTRA'
|
|
660
|
+
cosmos_extra = value
|
|
661
|
+
# No else because we're only interested in these specific extra columns; others can be ignored
|
|
662
|
+
end
|
|
663
|
+
next
|
|
664
|
+
end
|
|
665
|
+
|
|
666
|
+
local_idx = sql_to_local[sql_index]
|
|
667
|
+
|
|
668
|
+
# Track timestamp values from item columns
|
|
669
|
+
if col_name == 'RECEIVED_TIMESECONDS'
|
|
670
|
+
timestamp_values['RECEIVED_TIMESECONDS'] = value
|
|
671
|
+
end
|
|
672
|
+
|
|
673
|
+
next if value.nil?
|
|
674
|
+
|
|
675
|
+
type_info = meta[:item_types][local_idx] || {}
|
|
676
|
+
if meta[:stored_timestamp_item_keys].key?(meta[:item_keys][local_idx])
|
|
677
|
+
ts_utc = coerce_to_utc(value)
|
|
678
|
+
values[local_idx] = format_timestamp(ts_utc, :seconds) if ts_utc
|
|
679
|
+
else
|
|
680
|
+
values[local_idx] = decode_value(
|
|
681
|
+
value,
|
|
682
|
+
data_type: type_info['data_type'],
|
|
683
|
+
array_size: type_info['array_size']
|
|
684
|
+
)
|
|
685
|
+
end
|
|
686
|
+
end
|
|
687
|
+
|
|
688
|
+
# Build ordered entry hash with calculated items in their natural position
|
|
689
|
+
meta[:item_keys].each_with_index do |item_key, local_idx|
|
|
690
|
+
if meta[:calculated_positions].key?(local_idx)
|
|
691
|
+
calc_info = meta[:calculated_positions][local_idx]
|
|
692
|
+
ts_value = timestamp_values[calc_info[:source]]
|
|
693
|
+
next unless ts_value
|
|
694
|
+
ts_utc = coerce_to_utc(ts_value)
|
|
695
|
+
calculated_value = format_timestamp(ts_utc, calc_info[:format])
|
|
696
|
+
entry[item_key] = calculated_value if calculated_value
|
|
697
|
+
elsif !values[local_idx].nil?
|
|
698
|
+
entry[item_key] = values[local_idx]
|
|
699
|
+
end
|
|
700
|
+
end
|
|
701
|
+
|
|
702
|
+
entry['__time'] = time_ns if time_ns
|
|
703
|
+
entry['COSMOS_EXTRA'] = cosmos_extra if cosmos_extra
|
|
704
|
+
entry
|
|
705
|
+
end
|
|
706
|
+
|
|
707
|
+
# Decode a single row from a SELECT * packet query into an entry hash.
|
|
708
|
+
# Handles nanosecond timestamp CAST columns, value-type column preference,
|
|
709
|
+
# and type-aware decoding.
|
|
710
|
+
#
|
|
711
|
+
# @param row [PG::Result row] Single row as iterable [col_name, value] pairs
|
|
712
|
+
# @param value_type [Symbol] :RAW, :CONVERTED, :FORMATTED
|
|
713
|
+
# @param packet_def [Hash, nil] Packet definition for type-aware decoding
|
|
714
|
+
# @return [Hash] Entry hash with item => value, __time, COSMOS_EXTRA, timestamp entries
|
|
715
|
+
def self.decode_packet_row(row, value_type, packet_def)
|
|
716
|
+
entry = {}
|
|
717
|
+
item_defs = build_item_defs_map(packet_def)
|
|
718
|
+
|
|
719
|
+
# First pass: build a hash of all columns for value-type preference lookups
|
|
720
|
+
columns = {}
|
|
721
|
+
row.each do |tuple|
|
|
722
|
+
columns[tuple[0]] = tuple[1]
|
|
723
|
+
end
|
|
724
|
+
|
|
725
|
+
cosmos_timestamp_ns = nil
|
|
726
|
+
received_timestamp_ns = nil
|
|
727
|
+
|
|
728
|
+
# Second pass: process columns based on value_type
|
|
729
|
+
row.each do |tuple|
|
|
730
|
+
column_name = tuple[0]
|
|
731
|
+
raw_value = tuple[1]
|
|
732
|
+
|
|
733
|
+
if column_name == '__pkt_time_ns'
|
|
734
|
+
cosmos_timestamp_ns = raw_value.to_i
|
|
735
|
+
entry['__time'] = cosmos_timestamp_ns
|
|
736
|
+
next
|
|
737
|
+
end
|
|
738
|
+
|
|
739
|
+
if column_name == '__rx_time_ns'
|
|
740
|
+
received_timestamp_ns = raw_value.to_i
|
|
741
|
+
next
|
|
742
|
+
end
|
|
743
|
+
|
|
744
|
+
# Skip PG timestamp versions - handled via CAST AS LONG columns above
|
|
745
|
+
next if column_name == 'PACKET_TIMESECONDS'
|
|
746
|
+
next if column_name == 'RECEIVED_TIMESECONDS'
|
|
747
|
+
next if column_name == 'COSMOS_DATA_TAG'
|
|
748
|
+
|
|
749
|
+
if column_name == 'COSMOS_EXTRA'
|
|
750
|
+
entry['COSMOS_EXTRA'] = raw_value
|
|
751
|
+
next
|
|
752
|
+
end
|
|
753
|
+
|
|
754
|
+
base_name = column_name.sub(/(__C|__F|__U)$/, '')
|
|
755
|
+
item_def = item_defs[base_name]
|
|
756
|
+
|
|
757
|
+
col_value_type = value_type_for_column_suffix(column_name)
|
|
758
|
+
type_info = resolve_item_type(item_def, col_value_type)
|
|
759
|
+
value = decode_value(raw_value, data_type: type_info['data_type'], array_size: type_info['array_size'])
|
|
760
|
+
|
|
761
|
+
case value_type
|
|
762
|
+
when :RAW
|
|
763
|
+
next if column_name.end_with?('__C', '__F', '__U')
|
|
764
|
+
entry[column_name] = value
|
|
765
|
+
when :CONVERTED
|
|
766
|
+
if column_name.end_with?('__C')
|
|
767
|
+
entry[column_name.sub(/__C$/, '')] = value
|
|
768
|
+
elsif !column_name.end_with?('__F', '__U') && !columns.key?("#{column_name}__C")
|
|
769
|
+
entry[column_name] = value
|
|
770
|
+
end
|
|
771
|
+
when :FORMATTED
|
|
772
|
+
if column_name.end_with?('__F')
|
|
773
|
+
entry[column_name.sub(/__F$/, '')] = value
|
|
774
|
+
elsif column_name.end_with?('__C') && !columns.key?("#{column_name.sub(/__C$/, '')}__F")
|
|
775
|
+
entry[column_name.sub(/__C$/, '')] = value
|
|
776
|
+
elsif !column_name.end_with?('__C', '__F', '__U') && !columns.key?("#{column_name}__F") && !columns.key?("#{column_name}__C")
|
|
777
|
+
entry[column_name] = value
|
|
778
|
+
end
|
|
779
|
+
else
|
|
780
|
+
raise QuestDBError.new("Unsupported value type for packet decoding: #{value_type}")
|
|
781
|
+
end
|
|
782
|
+
end
|
|
783
|
+
|
|
784
|
+
add_timestamp_entries!(entry, cosmos_timestamp_ns, 'PACKET')
|
|
785
|
+
add_timestamp_entries!(entry, received_timestamp_ns, 'RECEIVED')
|
|
786
|
+
entry
|
|
787
|
+
end
|
|
788
|
+
|
|
789
|
+
# Decode a single row from a SAMPLE BY aggregation query.
# All non-timestamp columns are decoded as DOUBLE (aggregation results are always numeric).
#
# @param row [PG::Result row] Single row as iterable [col_name, value] pairs
# @return [Hash] { col_name => decoded_value, '__time' => ns_integer }
def self.decode_reduced_row(row)
  row.each_with_object({}) do |(col_name, raw_value), decoded|
    if col_name == 'PACKET_TIMESECONDS'
      # The designated timestamp column is surfaced under the '__time' key
      decoded['__time'] = raw_value.to_i
    else
      # Aggregation outputs are always numeric, so decode everything as DOUBLE
      decoded[col_name] = decode_value(raw_value, data_type: 'DOUBLE', array_size: nil)
    end
  end
end
|
|
807
|
+
|
|
808
|
+
# Query historical telemetry data from QuestDB for a list of items.
# Builds the SQL query, executes it, and decodes all results.
#
# @param items [Array] Array of [target_name, packet_name, item_name, value_type, limits]
#   item_name may be nil to indicate a placeholder (non-existent item)
# @param start_time [String, Numeric] Start timestamp for the query
# @param end_time [String, Numeric, nil] End timestamp, or nil for "latest single row"
# @param scope [String] Scope name
# @return [Array, Hash] Array of [value, limits_state] pairs per row, or {} if no results.
#   Single-row results return a flat array; multi-row results return array of arrays.
def self.tsdb_lookup(items, start_time:, end_time: nil, scope: "DEFAULT")
  tables = {}              # ordered set of table names; insertion order = join order (T0, T1, ...)
  names = []               # SELECT column expressions, in output position order
  nil_count = 0            # counter used to alias placeholder (nil item) columns uniquely
  packet_cache = {}        # memoizes fetch_packet_def per [target, packet] pair
  item_types = {}          # col_name => type info hash used later by decode_value
  calculated_items = {}    # output position => derived-timestamp spec (inserted post-decode)
  needed_timestamps = {}   # table index => Set of raw timestamp columns the derived items need
  current_position = 0     # position of the next item in the result rows

  items.each do |item|
    target_name, packet_name, orig_item_name, value_type, limits = item
    # nil item name = placeholder: select PACKET_TIMESECONDS under a throwaway alias
    # so the output position is preserved; decoded later as [nil, nil]
    if orig_item_name.nil?
      names << "PACKET_TIMESECONDS as __nil#{nil_count}"
      nil_count += 1
      current_position += 1
      next
    end
    table_name = sanitize_table_name(target_name, packet_name, scope: scope)
    tables[table_name] = 1
    # Hash preserves insertion order, so find_index yields this table's join alias number
    index = tables.find_index {|k,_v| k == table_name }

    # Timestamp items stored directly as columns: select as-is, no type decode needed
    if STORED_TIMESTAMP_ITEMS.include?(orig_item_name)
      names << "\"T#{index}.#{orig_item_name}\""
      current_position += 1
      next
    end

    # Derived timestamp items (e.g. formatted variants): not selected directly.
    # Record what raw source column is needed and compute the value after decoding.
    if TIMESTAMP_ITEMS.key?(orig_item_name)
      ts_info = TIMESTAMP_ITEMS[orig_item_name]
      calculated_items[current_position] = {
        source: ts_info[:source],
        format: ts_info[:format],
        table_index: index
      }
      needed_timestamps[index] ||= Set.new
      needed_timestamps[index] << ts_info[:source]
      current_position += 1
      next
    end

    safe_item_name = sanitize_column_name(orig_item_name)

    # Fetch the packet definition once per [target, packet] to resolve item data types
    cache_key = [target_name, packet_name]
    unless packet_cache.key?(cache_key)
      packet_cache[cache_key] = fetch_packet_def(target_name, packet_name, scope: scope)
    end

    packet_def = packet_cache[cache_key]
    item_def = find_item_def(packet_def, orig_item_name)

    # Suffix selects the stored variant for the requested value type (e.g. __C / __F / __U)
    suffix = column_suffix_for_value_type(value_type)
    col_name = "T#{index}.#{safe_item_name}#{suffix}"
    names << "\"#{col_name}\""
    item_types[col_name] = resolve_item_type(item_def, value_type)
    current_position += 1
    if limits
      # Limits state column rides immediately after its item; decoded into the item's
      # [value, limits_state] pair rather than taking its own output position
      names << "\"T#{index}.#{safe_item_name}__L\""
    end
  end

  # Add needed timestamp columns to the SELECT for calculated items
  needed_timestamps.each do |table_index, ts_columns|
    ts_columns.each do |ts_col|
      # Aliased with a recognizable T<idx>___ts_ prefix so the decode loop can
      # stash them in row_timestamps instead of emitting them as item values
      names << "T#{table_index}.#{ts_col} as T#{table_index}___ts_#{ts_col}"
    end
  end

  # Build the SQL query: first table is the driver, additional tables are ASOF JOINed
  query = "SELECT #{names.join(", ")} FROM "
  tables.each_with_index do |(table_name, _), index|
    if index == 0
      query += "#{table_name} as T#{index} "
    else
      query += "ASOF JOIN #{table_name} as T#{index} "
    end
  end
  query_params = []
  if start_time && !end_time
    # No end_time: return only the latest row strictly before start_time
    # (QuestDB LIMIT -1 selects the last row)
    query += "WHERE T0.PACKET_TIMESECONDS < $1 LIMIT -1"
    query_params << start_time
  elsif start_time && end_time
    # Half-open range [start_time, end_time)
    query += "WHERE T0.PACKET_TIMESECONDS >= $1 AND T0.PACKET_TIMESECONDS < $2"
    query_params << start_time
    query_params << end_time
  end

  result = query_with_retry(query, params: query_params, label: "tsdb_lookup")
  if result.nil? or result.ntuples == 0
    return {}
  end

  data = []
  result.each_with_index do |tuples, row_num|
    data[row_num] ||= []
    row_index = 0         # output position within this row; only advanced for value columns
    row_timestamps = {}   # "T<idx>.<source>" => raw timestamp, feeds calculated items below
    tuples.each do |tuple|
      col_name = tuple[0]
      col_value = tuple[1]
      if col_name.include?("__L")
        # Limits column: attach to the previously emitted item's pair.
        # NOTE(review): include? (not end_with?) — would also match "__L" mid-name,
        # and at row_index == 0 this writes data[row_num][-1]; presumably column
        # ordering guarantees neither happens — confirm against SELECT construction.
        data[row_num][row_index - 1][1] = col_value
      elsif col_name =~ /^__nil/
        # Placeholder item: emit [nil, nil] to hold its output position
        data[row_num][row_index] = [nil, nil]
        row_index += 1
      elsif col_name =~ /^T(\d+)___ts_(.+)$/
        # Hidden timestamp column added for calculated items; capture, don't emit
        table_idx = $1.to_i
        ts_source = $2
        row_timestamps["T#{table_idx}.#{ts_source}"] = col_value
      elsif col_name.end_with?('.PACKET_TIMESECONDS', '.RECEIVED_TIMESECONDS') || col_name == 'PACKET_TIMESECONDS' || col_name == 'RECEIVED_TIMESECONDS'
        # Stored timestamp item requested directly: emit as seconds
        ts_utc = coerce_to_utc(col_value)
        seconds_value = format_timestamp(ts_utc, :seconds)
        data[row_num][row_index] = [seconds_value, nil]
        row_index += 1
        # Also record raw value for any calculated items sharing this source;
        # an unprefixed name implies the driver table T0
        if col_name.include?('.')
          row_timestamps[col_name] = col_value
        else
          row_timestamps["T0.#{col_name}"] = col_value
        end
      else
        # Regular telemetry item: look up its type info, trying bare name first,
        # then each table-prefixed variant (the DB may strip/keep the alias prefix)
        type_info = item_types[col_name]
        unless type_info
          tables.length.times do |i|
            prefixed_name = "T#{i}.#{col_name}"
            type_info = item_types[prefixed_name]
            break if type_info
          end
          type_info ||= {}  # unknown type: decode_value falls back to defaults
        end
        decoded_value = decode_value(
          col_value,
          data_type: type_info['data_type'],
          array_size: type_info['array_size']
        )
        data[row_num][row_index] = [decoded_value, nil]
        row_index += 1
      end
    end

    # Insert calculated (derived timestamp) values at their original request
    # positions; ascending sort keeps earlier inserts from shifting later targets
    calculated_items.keys.sort.each do |position|
      calc_info = calculated_items[position]
      ts_key = "T#{calc_info[:table_index]}.#{calc_info[:source]}"
      ts_value = row_timestamps[ts_key]
      ts_utc = coerce_to_utc(ts_value)
      calculated_value = format_timestamp(ts_utc, calc_info[:format])
      data[row_num].insert(position, [calculated_value, nil])
    end
  end
  # Single-row result flattens to one array of pairs (the "latest value" case)
  if result.ntuples == 1
    data = data[0]
  end
  data
end
|
|
209
971
|
end
|
|
210
972
|
end
|