tcd 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/tcd/reader.rb ADDED
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+require_relative "bit_buffer"
+require_relative "header"
+require_relative "lookup_tables"
+require_relative "constituent"
+require_relative "station"
+
+module TCD
+  # Main reader class for TCD (Tidal Constituent Database) files.
+  # Provides access to header info, lookup tables, constituents, and stations.
+  class Reader
+    attr_reader :path, :header, :lookup_tables, :constituent_data
+
+    def initialize(path)
+      @path = path
+      @file = File.open(path, "rb")
+      @stations_loaded = false
+      @stations = []
+
+      load_metadata
+    end
+
+    # Close the file handle
+    def close
+      @file.close unless @file.closed?
+    end
+
+    # Database version string
+    def version
+      @header.version
+    end
+
+    # Last modified date string
+    def last_modified
+      @header.last_modified
+    end
+
+    # Number of station records
+    def station_count
+      @header.number_of_records
+    end
+
+    # Number of constituents
+    def constituent_count
+      @header.constituents
+    end
+
+    # Year range covered by equilibrium/node factor data
+    def year_range
+      start_year = @header.start_year
+      end_year = start_year + @header.number_of_years - 1
+      start_year..end_year
+    end
+
+    # File size in bytes
+    def file_size
+      @header.end_of_file
+    end
+
+    # Access constituents
+    def constituents
+      @constituent_data
+    end
+
+    # Find constituent by name
+    def constituent(name)
+      @constituent_data.find(name)
+    end
+
+    # Load and return all stations (lazy-loaded)
+    def stations
+      load_stations unless @stations_loaded
+      @stations
+    end
+
+    # Iterate over stations without loading all into memory
+    def each_station(&block)
+      return enum_for(:each_station) unless block_given?
+
+      if @stations_loaded
+        @stations.each(&block)
+      else
+        @file.seek(@stations_offset)
+        @bit = BitBuffer.new(@file)
+        parser = StationParser.new(@bit, @header, @lookup_tables)
+
+        @header.number_of_records.times do |i|
+          station = parser.parse(i)
+          yield station
+        end
+      end
+    end
+
+    # Find stations by name (substring match, case-insensitive)
+    def find_stations(query)
+      query_down = query.downcase
+      stations.select { |s| s.name.downcase.include?(query_down) }
+    end
+
+    # Find station by exact name
+    def station_by_name(name)
+      stations.find { |s| s.name == name }
+    end
+
+    # Get reference stations only
+    def reference_stations
+      stations.select(&:reference?)
+    end
+
+    # Get subordinate stations only
+    def subordinate_stations
+      stations.select(&:subordinate?)
+    end
+
+    # Infer missing constituents for a reference station.
+    # Requires the station to have non-zero values for M2, S2, K1, and O1.
+    # Returns true if inference was performed, false if not enough data.
+    def infer_constituents(station)
+      Inference.infer_constituents(station, @constituent_data)
+    end
+
+    # Find the nearest station to a given latitude/longitude.
+    # Uses simple Euclidean distance (suitable for nearby searches).
+    # For more accurate global searches, consider using the Haversine formula.
+    #
+    # @param lat [Float] Latitude in decimal degrees
+    # @param lon [Float] Longitude in decimal degrees
+    # @param type [Symbol, nil] Optional filter: :reference, :subordinate, or nil for all
+    # @return [Station, nil] The nearest station, or nil if no stations found
+    def nearest_station(lat, lon, type: nil)
+      candidates = case type
+                   when :reference then reference_stations
+                   when :subordinate then subordinate_stations
+                   else stations
+                   end
+
+      return nil if candidates.empty?
+
+      candidates.min_by do |s|
+        dlat = lat - s.latitude
+        dlon = lon - s.longitude
+        dlat * dlat + dlon * dlon
+      end
+    end
+
+    # Find stations within a given radius of a latitude/longitude.
+    # Uses simple Euclidean distance in degrees.
+    #
+    # @param lat [Float] Latitude in decimal degrees
+    # @param lon [Float] Longitude in decimal degrees
+    # @param radius [Float] Radius in degrees (roughly: 1° ≈ 111 km at equator)
+    # @param type [Symbol, nil] Optional filter: :reference, :subordinate, or nil for all
+    # @return [Array<Station>] Stations within the radius, sorted by distance
+    def stations_near(lat, lon, radius:, type: nil)
+      candidates = case type
+                   when :reference then reference_stations
+                   when :subordinate then subordinate_stations
+                   else stations
+                   end
+
+      radius_sq = radius * radius
+
+      candidates.select do |s|
+        dlat = lat - s.latitude
+        dlon = lon - s.longitude
+        dlat * dlat + dlon * dlon <= radius_sq
+      end.sort_by do |s|
+        dlat = lat - s.latitude
+        dlon = lon - s.longitude
+        dlat * dlat + dlon * dlon
+      end
+    end
+
+    # Summary statistics
+    def stats
+      all = stations
+      {
+        total_stations: all.size,
+        reference_stations: all.count(&:reference?),
+        subordinate_stations: all.count(&:subordinate?),
+        constituents: constituent_count,
+        countries: @lookup_tables.countries.size,
+        timezones: @lookup_tables.timezones.size,
+        datums: @lookup_tables.datums.size,
+        year_range: year_range,
+        file_size: file_size
+      }
+    end
+
+    private
+
+    def load_metadata
+      # Parse ASCII header
+      @header = Header.new(@file)
+
+      # Load lookup tables (handles interleaved string tables and binary data)
+      # This also identifies the constituent data offset and station records offset
+      @lookup_tables = LookupTables.new(@file, @header)
+
+      # Load constituent data (speeds, equilibrium, node factors) from tracked offset
+      @file.seek(@lookup_tables.constituent_data_offset)
+      @bit = BitBuffer.new(@file)
+      @constituent_data = ConstituentData.new(@bit, @header, @lookup_tables)
+
+      # Station records start after constituent names (tracked by lookup_tables)
+      @stations_offset = @lookup_tables.station_records_offset
+    end
+
+    def load_stations
+      return if @stations_loaded
+
+      @file.seek(@stations_offset)
+      @bit = BitBuffer.new(@file)
+      parser = StationParser.new(@bit, @header, @lookup_tables)
+
+      @stations = @header.number_of_records.times.map do |i|
+        parser.parse(i)
+      end
+
+      @stations_loaded = true
+    end
+  end
+end
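For orientation, here is a minimal usage sketch of the Reader API defined above. It is not part of the gem; the file path is hypothetical and assumes a valid XTide harmonics TCD file on disk.

    # Minimal sketch of using TCD::Reader directly (path is hypothetical).
    require "tcd"

    reader = TCD::Reader.new("harmonics.tcd")
    puts reader.version          # database version string from the header
    puts reader.station_count    # number of station records
    puts reader.year_range       # years covered by equilibrium/node factor data

    # Substring search and a simple nearest-station lookup (Euclidean, in degrees).
    reader.find_stations("Seattle").each { |s| puts s }
    puts reader.nearest_station(47.6, -122.3)

    # Stream stations without loading them all into memory.
    reader.each_station { |s| puts s.name if s.reference? }

    reader.close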
data/lib/tcd/station.rb ADDED
@@ -0,0 +1,333 @@
+# frozen_string_literal: true
+
+module TCD
+  # Null value constants per TCD spec
+  NULLSLACKOFFSET = 0xA00  # 2560 - null indicator for slack offsets
+  NULL_DIRECTION = 361     # Null indicator for direction fields
+
+  # Base station record with fields common to all station types
+  Station = Struct.new(
+    # Header/index fields
+    :record_number,        # Implicit record index (0-based)
+    :record_size,          # Size of this record in bytes
+    :record_type,          # 1 = reference, 2 = subordinate
+    :latitude,             # Decimal degrees (-90 to 90)
+    :longitude,            # Decimal degrees (-180 to 180)
+    :tzfile,               # Timezone file name
+    :name,                 # Station name
+    :reference_station,    # Index of reference station (-1 if self)
+
+    # Metadata fields
+    :country,              # Country name
+    :source,               # Data source attribution
+    :restriction,          # Access restriction text
+    :comments,             # Comments field
+    :notes,                # Notes field
+    :legalese,             # Legal notice text
+    :station_id_context,   # Station ID context
+    :station_id,           # Station ID
+    :date_imported,        # Import date (YYYYMMDD or 0)
+    :xfields,              # Extended fields (string)
+    :direction_units,      # Direction units (degrees true, etc.)
+    :min_direction,        # Minimum direction
+    :max_direction,        # Maximum direction
+    :level_units,          # Level units (feet, meters, etc.)
+
+    # Type 1 (Reference) specific fields
+    :datum,                # Datum name (e.g., "MLLW")
+    :datum_offset,         # Datum offset (Z0) in level units
+    :zone_offset,          # Time zone offset from GMT0 (integer +/-HHMM)
+    :expiration_date,      # Expiration date (YYYYMMDD or 0)
+    :months_on_station,    # Months of observation data
+    :last_date_on_station, # Last date on station (YYYYMMDD)
+    :confidence,           # Confidence value (0-15)
+    :amplitudes,           # Array of amplitudes per constituent
+    :epochs,               # Array of epochs (phases) per constituent
+
+    # Type 2 (Subordinate) specific fields
+    :min_time_add,         # Minutes to add to reference low tide time
+    :max_time_add,         # Minutes to add to reference high tide time
+    :flood_begins,         # Flood begins offset (for currents)
+    :ebb_begins,           # Ebb begins offset (for currents)
+    :min_level_add,        # Value to add to reference low level
+    :max_level_add,        # Value to add to reference high level
+    :min_level_multiply,   # Multiplier for reference low level
+    :max_level_multiply,   # Multiplier for reference high level
+
+    keyword_init: true
+  ) do
+    def reference?
+      record_type == 1
+    end
+
+    def subordinate?
+      record_type == 2
+    end
+
+    # Count of non-zero amplitude constituents
+    def active_constituents
+      return 0 unless amplitudes
+      amplitudes.count { |a| a && a > 0 }
+    end
+
+    # Check if this is a "simple" subordinate station.
+    # A simple subordinate has identical high/low offsets, no direction data,
+    # and no flood/ebb slack times. This is common for tide stations
+    # (as opposed to current stations which have direction/slack data).
+    #
+    # @return [Boolean] true if simple, false otherwise (always false for reference stations)
+    def simple?
+      return false unless subordinate?
+
+      max_time_add == min_time_add &&
+        max_level_add == min_level_add &&
+        max_level_multiply == min_level_multiply &&
+        min_direction.nil? &&
+        max_direction.nil? &&
+        flood_begins.nil? &&
+        ebb_begins.nil?
+    end
+
+    # Check if this station has current (not tide) data.
+    # Current stations have direction fields and/or flood/ebb slack times.
+    # These are the definitive indicators of current data.
+    #
+    # @return [Boolean] true if this is a current station
+    def current?
+      # Reference stations with direction data are current stations
+      return true if min_direction || max_direction
+
+      # Subordinate stations with flood/ebb times are current stations
+      return true if flood_begins || ebb_begins
+
+      false
+    end
+
+    # Check if this station has tide (not current) data.
+    # Tide stations do not have current-specific indicators (direction, flood/ebb).
+    # Note: subordinate tide stations may have different high/low corrections.
+    #
+    # @return [Boolean] true if this is a tide station
+    def tide?
+      !current?
+    end
+
+    def to_s
+      type_str = reference? ? "Reference" : "Subordinate"
+      "#{name} (#{type_str}) @ #{format('%.5f', latitude)}, #{format('%.5f', longitude)}"
+    end
+  end
+
+  # Parser for station records from bit-packed binary data
+  class StationParser
+    def initialize(bit_buffer, header, lookup_tables)
+      @bit = bit_buffer
+      @header = header
+      @lookup = lookup_tables
+    end
+
+    # Parse a single station record at current position
+    def parse(record_number)
+      start_pos = @bit.pos
+
+      # ============================================
+      # Partial header (common to all record types)
+      # Per libtcd unpack_partial_tide_record()
+      # ============================================
+
+      # Record size and type
+      record_size = @bit.read_uint(@header.record_size_bits)
+      record_type = @bit.read_uint(@header.record_type_bits)
+
+      # Geographic coordinates
+      latitude = @bit.read_int(@header.latitude_bits).to_f / @header.latitude_scale
+      longitude = @bit.read_int(@header.longitude_bits).to_f / @header.longitude_scale
+
+      # Timezone file index (comes BEFORE name in TCD format)
+      tzfile_idx = @bit.read_uint(@header.tzfile_bits)
+      tzfile = @lookup.timezone(tzfile_idx)
+
+      # Station name (null-terminated string)
+      name = @bit.read_cstring
+
+      # Reference station index (-1 for reference stations referring to themselves)
+      reference_station = @bit.read_int(@header.station_bits)
+
+      # ============================================
+      # Extended fields (V2 format)
+      # Per libtcd unpack_tide_record() case 2
+      # ============================================
+
+      # Country
+      country_idx = @bit.read_uint(@header.country_bits)
+      country = @lookup.country(country_idx)
+
+      # Source string
+      source = @bit.read_cstring
+
+      # Restriction
+      restriction_idx = @bit.read_uint(@header.restriction_bits)
+      restriction = @lookup.restriction(restriction_idx)
+
+      # Comments and notes
+      comments = @bit.read_cstring
+      notes = @bit.read_cstring
+
+      # Legalese
+      legalese_idx = @bit.read_uint(@header.legalese_bits)
+      legalese = @lookup.legalese_text(legalese_idx)
+
+      # Station ID fields
+      station_id_context = @bit.read_cstring
+      station_id = @bit.read_cstring
+
+      # Date imported (YYYYMMDD integer)
+      date_imported = @bit.read_uint(@header.date_bits)
+
+      # xfields (extended fields string)
+      xfields = @bit.read_cstring
+
+      # Direction units
+      direction_units_idx = @bit.read_uint(@header.direction_unit_bits)
+      direction_units = @lookup.direction_unit(direction_units_idx)
+
+      # Min/max direction
+      min_direction = @bit.read_uint(@header.direction_bits)
+      max_direction = @bit.read_uint(@header.direction_bits)
+      min_direction = nil if min_direction == NULL_DIRECTION
+      max_direction = nil if max_direction == NULL_DIRECTION
+
+      # Level units
+      level_units_idx = @bit.read_uint(@header.level_unit_bits)
+      level_units = @lookup.level_unit(level_units_idx)
+
+      # Build base station
+      station = Station.new(
+        record_number: record_number,
+        record_size: record_size,
+        record_type: record_type,
+        latitude: latitude,
+        longitude: longitude,
+        tzfile: tzfile,
+        name: name,
+        reference_station: reference_station,
+        country: country,
+        source: source,
+        restriction: restriction,
+        comments: comments,
+        notes: notes,
+        legalese: legalese,
+        station_id_context: station_id_context,
+        station_id: station_id,
+        date_imported: date_imported,
+        xfields: xfields,
+        direction_units: direction_units,
+        min_direction: min_direction,
+        max_direction: max_direction,
+        level_units: level_units
+      )
+
+      # Parse type-specific fields
+      if record_type == 1
+        parse_reference_fields(station)
+      else
+        parse_subordinate_fields(station)
+      end
+
+      # Ensure we're at the right position for the next record
+      # Record size is total bytes from start of record
+      expected_end = start_pos + record_size
+      @bit.seek(expected_end)
+
+      station
+    end
+
+    private
+
+    def parse_reference_fields(station)
+      # V2 Reference station fields (in order per libtcd)
+
+      # Datum offset (Z0) - signed
+      station.datum_offset = @bit.read_int(@header.datum_offset_bits).to_f / @header.datum_offset_scale
+
+      # Datum
+      datum_idx = @bit.read_uint(@header.datum_bits)
+      station.datum = @lookup.datum(datum_idx)
+
+      # Zone offset (integer +/-HHMM) - signed
+      station.zone_offset = @bit.read_int(@header.time_bits)
+
+      # Expiration date (YYYYMMDD)
+      station.expiration_date = @bit.read_uint(@header.date_bits)
+
+      # Months on station
+      station.months_on_station = @bit.read_uint(@header.months_on_station_bits)
+
+      # Last date on station
+      station.last_date_on_station = @bit.read_uint(@header.date_bits)
+
+      # Confidence value
+      station.confidence = @bit.read_uint(@header.confidence_value_bits)
+
+      # Initialize amplitude/epoch arrays
+      num_constituents = @header.constituents
+      station.amplitudes = Array.new(num_constituents, 0.0)
+      station.epochs = Array.new(num_constituents, 0.0)
+
+      # Read the count of non-zero constituents
+      count = @bit.read_uint(@header.constituent_bits)
+
+      # Read each constituent's index, amplitude, and epoch
+      count.times do
+        idx = @bit.read_uint(@header.constituent_bits)
+        amplitude = @bit.read_uint(@header.amplitude_bits).to_f / @header.amplitude_scale
+        epoch = @bit.read_uint(@header.epoch_bits).to_f / @header.epoch_scale
+
+        if idx < num_constituents
+          station.amplitudes[idx] = amplitude
+          station.epochs[idx] = epoch
+        end
+      end
+    end
+
+    def parse_subordinate_fields(station)
+      # V2 Subordinate station fields (in order per libtcd)
+      # Note: V2 has a different order than the documentation suggests!
+
+      # Time offsets (signed)
+      station.min_time_add = decode_time_offset(@bit.read_int(@header.time_bits))
+
+      # Level add (signed)
+      station.min_level_add = @bit.read_int(@header.level_add_bits).to_f / @header.level_add_scale
+
+      # Level multiply (UNSIGNED in V2!)
+      min_mult_raw = @bit.read_uint(@header.level_multiply_bits)
+      station.min_level_multiply = min_mult_raw == 0 ? 1.0 : min_mult_raw.to_f / @header.level_multiply_scale
+
+      station.max_time_add = decode_time_offset(@bit.read_int(@header.time_bits))
+
+      station.max_level_add = @bit.read_int(@header.level_add_bits).to_f / @header.level_add_scale
+
+      max_mult_raw = @bit.read_uint(@header.level_multiply_bits)
+      station.max_level_multiply = max_mult_raw == 0 ? 1.0 : max_mult_raw.to_f / @header.level_multiply_scale
+
+      # Flood/ebb begins (signed)
+      flood_raw = @bit.read_int(@header.time_bits)
+      ebb_raw = @bit.read_int(@header.time_bits)
+
+      # Check for null slack offsets
+      station.flood_begins = (flood_raw == NULLSLACKOFFSET) ? nil : decode_time_offset(flood_raw)
+      station.ebb_begins = (ebb_raw == NULLSLACKOFFSET) ? nil : decode_time_offset(ebb_raw)
+    end
+
+    # Decode time offset from hours*100+minutes format to total minutes
+    def decode_time_offset(raw)
+      return 0 if raw == 0
+      sign = raw < 0 ? -1 : 1
+      raw = raw.abs
+      hours = raw / 100
+      minutes = raw % 100
+      sign * (hours * 60 + minutes)
+    end
+  end
+end
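The subordinate-station time offsets above are stored as signed hours*100+minutes values, which decode_time_offset converts to total minutes. A standalone sketch of the same conversion (not part of the gem) makes the encoding concrete:

    # Hours*100+minutes encoding, as handled by StationParser#decode_time_offset.
    def decode_time_offset(raw)
      return 0 if raw == 0
      sign = raw < 0 ? -1 : 1
      raw = raw.abs
      sign * (raw / 100 * 60 + raw % 100)
    end

    decode_time_offset(130)   # =>  90  (+1 hour 30 minutes)
    decode_time_offset(-245)  # => -165 (-2 hours 45 minutes)
    decode_time_offset(0)     # =>   0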
data/lib/tcd/version.rb ADDED
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+module TCD
+  VERSION = "1.0.2"
+end
data/lib/tcd.rb ADDED
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require_relative "tcd/version"
+require_relative "tcd/reader"
+require_relative "tcd/inference"
+
+module TCD
+  class << self
+    # Open a TCD file and return a Reader instance
+    def open(path)
+      reader = Reader.new(path)
+      if block_given?
+        begin
+          yield reader
+        ensure
+          reader.close
+        end
+      else
+        reader
+      end
+    end
+  end
+end
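TCD.open above also accepts a block and closes the file afterward via ensure. A short usage sketch (the path is hypothetical):

    # Block form: the reader is closed automatically when the block exits.
    TCD.open("harmonics.tcd") do |reader|
      puts reader.stats                          # summary hash: station counts, constituents, year range, ...
      reader.stations.first(3).each { |s| puts s }
    end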
metadata ADDED
@@ -0,0 +1,90 @@
+--- !ruby/object:Gem::Specification
+name: tcd
+version: !ruby/object:Gem::Version
+  version: 1.0.2
+platform: ruby
+authors:
+- Jordan Ritter
+bindir: bin
+cert_chain: []
+date: 1980-01-02 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: minitest
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '5.0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '5.0'
+- !ruby/object:Gem::Dependency
+  name: rake
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '13.0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '13.0'
+description: A pure Ruby gem for reading TCD files containing tidal harmonic constituents
+  and station data used by XTide for tide predictions. No C extensions or external
+  dependencies required.
+email:
+- jpr5@darkridge.com
+executables:
+- tcd-info
+extensions: []
+extra_rdoc_files: []
+files:
+- CHANGELOG.md
+- LICENSE
+- README.md
+- bin/tcd-info
+- lib/tcd.rb
+- lib/tcd/bit_buffer.rb
+- lib/tcd/constituent.rb
+- lib/tcd/header.rb
+- lib/tcd/inference.rb
+- lib/tcd/lookup_tables.rb
+- lib/tcd/reader.rb
+- lib/tcd/station.rb
+- lib/tcd/version.rb
+homepage: https://github.com/jpr5/tcd
+licenses:
+- MIT
+metadata:
+  homepage_uri: https://github.com/jpr5/tcd
+  source_code_uri: https://github.com/jpr5/tcd
+  changelog_uri: https://github.com/jpr5/tcd/blob/master/CHANGELOG.md
+  bug_tracker_uri: https://github.com/jpr5/tcd/issues
+  documentation_uri: https://rubydoc.info/gems/tcd
+  rubygems_mfa_required: 'true'
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: 2.7.0
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubygems_version: 3.6.9
+specification_version: 4
+summary: Pure Ruby reader for XTide TCD (Tidal Constituent Database) files
+test_files: []