dotlyte 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,425 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "json"
4
+ require "set"
5
+
6
+ module Dotlyte
7
+ # Abstract source interface. Implement #load to return a Hash.
8
# Abstract source interface. Implement #load to return a Hash.
#
# Custom sources passed to Loader via :custom_sources are duck-typed
# (only #load is called), so including this module is optional — it
# exists to document the contract and fail loudly when #load is not
# overridden.
module Source
  # @return [Hash] the configuration data this source contributes
  # @raise [NotImplementedError] unless the including class overrides it
  def load
    raise NotImplementedError, "#{self.class}#load must return a Hash"
  end
end
13
+
14
+ # Main loader orchestrator (v2).
15
+ class Loader
16
+ # System env vars that are never imported automatically.
17
+ SYSTEM_ENV_BLOCKLIST = Set.new(%w[
18
+ PATH HOME USER SHELL TERM LANG LC_ALL LOGNAME HOSTNAME
19
+ PWD OLDPWD SHLVL TMPDIR EDITOR VISUAL PAGER DISPLAY
20
+ SSH_AUTH_SOCK SSH_AGENT_PID GPG_AGENT_INFO
21
+ COLORTERM TERM_PROGRAM TERM_PROGRAM_VERSION
22
+ XPC_FLAGS XPC_SERVICE_NAME COMMAND_MODE
23
+ LS_COLORS LSCOLORS CLICOLOR GREP_OPTIONS
24
+ COMP_WORDBREAKS HISTSIZE HISTFILESIZE HISTCONTROL
25
+ ]).freeze
26
+
27
+ SYSTEM_PREFIXES = %w[
28
+ npm_ VSCODE_ ELECTRON_ CHROME_ GITHUB_ CI_ GITLAB_
29
+ JENKINS_ TRAVIS_ CIRCLECI_ HOMEBREW_ JAVA_HOME GOPATH
30
+ NVM_ RVM_ RBENV_ PYENV_ CONDA_ VIRTUAL_ENV CARGO_HOME
31
+ ].freeze
32
+
33
+ DEFAULT_ROOT_MARKERS = %w[
34
+ .git .hg package.json Gemfile go.mod Cargo.toml build.gradle
35
+ pom.xml pyproject.toml .dotlyte
36
+ ].freeze
37
+
38
# Build a loader. Everything is an optional keyword; unspecified
# options fall back to the defaults shown below.
def initialize(**options)
  # Source selection
  @files          = options[:files]
  @sources        = options[:sources]
  @custom_sources = options[:custom_sources] || []

  # Layering inputs
  @defaults  = options[:defaults] || {}
  @overrides = options[:overrides] || {}
  @prefix    = options[:prefix]
  @env       = options[:env]

  # Validation / processing toggles
  @schema           = options[:schema]
  @strict           = options.fetch(:strict, false)
  @interpolate_vars = options.fetch(:interpolate_vars, true)
  @debug            = options.fetch(:debug, false)

  # Filesystem resolution
  @find_up      = options.fetch(:find_up, false)
  @root_markers = options[:root_markers] || DEFAULT_ROOT_MARKERS
  @cwd          = options[:cwd] || Dir.pwd

  # Env import & file watching
  @allow_all_env_vars = options.fetch(:allow_all_env_vars, false)
  @watch              = options.fetch(:watch, false)
  @debounce_ms        = options[:debounce_ms] || 300
end
57
+
58
# Orchestrate a full configuration load.
#
# Pipeline: resolve base dir -> gather layers (explicit files, named
# sources, or the default stack) -> deep-merge -> interpolate ->
# apply schema defaults -> decrypt -> validate -> detect sensitive
# keys -> wrap in Config -> optionally start a file watcher.
#
# @return [Config] the merged, post-processed configuration
# @raise [FileError] when an explicitly requested file is missing
def load
  base_dir = @find_up ? find_base_dir : @cwd
  layers = []

  if @files && !@files.empty?
    # Explicit file mode: every listed file must exist.
    @files.each do |f|
      full = File.expand_path(f, base_dir)
      unless File.exist?(full)
        raise FileError.new(
          "Config file not found: #{full}",
          file_path: full
        )
      end
      data = parse_file_by_extension(full)
      layers << data if data && !data.empty?
    end
  elsif @sources
    # Named-source mode: only the listed sources, in the given order.
    @sources.each do |source|
      data = load_source(source, base_dir)
      layers << data if data && !data.empty?
    end
  else
    # Default stack, lowest to highest priority.
    append_if(layers, @defaults)
    append_if(layers, load_yaml_files(base_dir))
    append_if(layers, load_json_files(base_dir))
    append_if(layers, load_dotenv_files(base_dir))
    append_if(layers, load_env_vars)
  end

  # Custom sources: anything responding to #load with a Hash result;
  # non-Hash or empty results are silently ignored.
  @custom_sources.each do |src|
    data = src.respond_to?(:load) ? src.load : nil
    layers << data if data.is_a?(Hash) && !data.empty?
  end

  # Overrides (highest priority)
  append_if(layers, @overrides)

  # Later layers win key-by-key via deep merge.
  merged = {}
  layers.each { |layer| merged = Merger.deep_merge(merged, layer) }

  # Variable interpolation across the merged hash, if enabled.
  if @interpolate_vars
    merged = Interpolation.interpolate_deep(merged)
  end

  # Schema defaults (applied to merged before decryption/validation).
  if @schema
    Validator.apply_defaults(merged, @schema)
  end

  # Decryption: only attempted when a key can be resolved for @env.
  enc_key = Encryption.resolve_encryption_key(@env)
  decrypt_recursive!(merged, enc_key) if enc_key

  # Schema validation.
  # NOTE(review): validation only runs when @strict is set — a schema
  # without strict gets defaults applied but is never validated;
  # confirm that is intended.
  if @schema
    Validator.assert_valid!(merged, @schema, strict: @strict) if @strict
  end

  # Sensitive keys: union of schema-declared and pattern-detected keys.
  sensitive = Set.new
  sensitive.merge(Validator.sensitive_keys(@schema)) if @schema
  sensitive.merge(Masking.build_sensitive_set(merged))

  config = Config.new(merged, schema: @schema, sensitive_keys: sensitive)

  # Watch: reload raw data on file changes (debounced).
  # NOTE(review): the reload lambda returns load_raw's Hash but the
  # Config built above is never swapped here — presumably
  # ConfigWatcher delivers the fresh data to subscribers; verify.
  if @watch
    watched_files = collect_watched_files(base_dir)
    unless watched_files.empty?
      watcher = ConfigWatcher.new(files: watched_files, debounce_ms: @debounce_ms)
      reload_fn = -> { self.class.new(**reload_options).load_raw }
      watcher.start(reload_fn)
    end
  end

  config
end
138
+
139
# Produce the merged raw Hash for watcher reloads — no Config
# wrapping, interpolation, validation, decryption, or watching.
# Unlike #load, missing explicit files are skipped, not fatal.
def load_raw
  base_dir = @find_up ? find_base_dir : @cwd

  layers =
    if @files && !@files.empty?
      @files.filter_map do |f|
        full = File.expand_path(f, base_dir)
        next unless File.exist?(full)

        data = parse_file_by_extension(full)
        data unless data.nil? || data.empty?
      end
    else
      collected = []
      append_if(collected, @defaults)
      append_if(collected, load_yaml_files(base_dir))
      append_if(collected, load_json_files(base_dir))
      append_if(collected, load_dotenv_files(base_dir))
      append_if(collected, load_env_vars)
      collected
    end

  layers.reduce({}) { |acc, layer| Merger.deep_merge(acc, layer) }
end
164
+
165
+ private
166
+
167
# Push +data+ onto +layers+ unless it is nil or empty.
def append_if(layers, data)
  return if data.nil? || data.empty?

  layers << data
end
170
+
171
# Walk upward from @cwd until a directory containing one of the
# configured root markers is found; fall back to @cwd when the
# filesystem root is reached without a match.
def find_base_dir
  dir = File.expand_path(@cwd)

  until @root_markers.any? { |marker| File.exist?(File.join(dir, marker)) }
    parent = File.dirname(dir)
    return @cwd if parent == dir # hit the filesystem root

    dir = parent
  end

  dir
end
182
+
183
# Resolve a named source string to its data Hash. Unknown names
# contribute an empty Hash.
def load_source(name, base_dir)
  if name == "defaults"
    @defaults
  elsif name == "yaml"
    load_yaml_files(base_dir)
  elsif name == "json"
    load_json_files(base_dir)
  elsif name == "dotenv"
    load_dotenv_files(base_dir)
  elsif name == "env"
    load_env_vars
  else
    {}
  end
end
193
+
194
# Merge .env, .env.<env> (when an env is set), and .env.local from
# base_dir; later files win via deep merge.
def load_dotenv_files(base_dir)
  names = [".env"]
  names << ".env.#{@env}" if @env
  names << ".env.local"

  names.reduce({}) do |acc, name|
    path = File.join(base_dir, name)
    next acc unless File.exist?(path)

    Merger.deep_merge(acc, parse_dotenv(path))
  end
end
209
+
210
# Parse a dotenv-format file into a flat Hash (keys downcased,
# values type-coerced via Coercion.coerce).
#
# Supported syntax: blank lines, `#` comment lines, optional
# `export ` prefix, and KEY=VALUE pairs whose values may be
# unquoted, single-quoted, backtick-quoted, or double-quoted.
# Double-quoted values may span multiple lines and have escape
# sequences processed; unquoted values have ` #` inline comments
# stripped.
#
# @param filepath [String]
# @return [Hash]
# @raise [ParseError] on a non-comment line lacking `=`
def parse_dotenv(filepath)
  result = {}
  content = File.read(filepath)
  lines = content.split("\n")
  i = 0

  # Manual index loop because multiline values consume extra lines.
  while i < lines.length
    line = lines[i].strip
    i += 1 # i is now the 1-based number of the line just read

    next if line.empty? || line.start_with?("#")

    # Allow shell-style `export KEY=VALUE`.
    line = line.sub(/\Aexport\s+/, "")

    unless line.include?("=")
      raise ParseError.new(
        "Invalid syntax in #{filepath}:#{i}: expected KEY=VALUE, got: #{line.inspect}"
      )
    end

    key, _, value = line.partition("=")
    key = key.strip
    value = value.strip

    if value.length >= 2 && %w[" ' `].include?(value[0])
      quote = value[0]
      if quote == "'" || quote == "`"
        # Single-quoted or backtick: take everything up to the closing
        # quote; an unterminated quote keeps the rest of the line.
        end_idx = value.index(quote, 1)
        value = end_idx ? value[1...end_idx] : value[1..]
      else
        # Double-quoted: look for a closing quote on the same line.
        stripped = value[1..]
        if stripped.include?('"')
          end_idx = stripped.index('"')
          value = stripped[0...end_idx]
          # Process escape sequences
          value = process_escapes(value)
        else
          # Multiline value: keep consuming lines until one brings a
          # double quote into the buffer.
          # NOTE(review): the closing quote is found with rindex and
          # escaped quotes (\") are not distinguished, so any quote in
          # a continuation line terminates the value — confirm this
          # limitation is acceptable.
          buf = +stripped
          while i < lines.length
            buf << "\n"
            buf << lines[i]
            i += 1
            if buf.include?('"')
              end_idx = buf.rindex('"')
              value = process_escapes(buf[0...end_idx])
              break
            end
          end
          # NOTE(review): if no closing quote is ever found, value
          # stays as the original text including its opening quote.
        end
      end
    else
      # Unquoted — strip inline ` #` comment.
      comment_idx = value.index(" #")
      value = value[0...comment_idx].rstrip if comment_idx
    end

    result[key.downcase] = Coercion.coerce(value)
  end
  result
end
274
+
275
# Translate dotenv escape sequences inside a double-quoted value.
#
# Handles \n, \t, \r, \" and \\ in a single left-to-right pass.
# The previous chained-gsub version applied the \n substitution
# before \\, so an escaped backslash followed by `n` (the literal
# text `\\n`, meaning backslash + "n") was corrupted into a real
# newline. Unknown escape sequences are left untouched, as before.
#
# @param s [String]
# @return [String]
def process_escapes(s)
  s.gsub(/\\([ntr"\\])/) do
    case Regexp.last_match(1)
    when "n" then "\n"
    when "t" then "\t"
    when "r" then "\r"
    else Regexp.last_match(1) # `"` and `\` unescape to themselves
    end
  end
end
282
+
283
# Merge config.yaml / config.yml (plus config.<env>.* variants) found
# in base_dir; later candidates win via deep merge.
#
# Files that fail to read or parse are skipped silently (best effort).
# An empty Hash is returned when the YAML library itself cannot be
# loaded: the original rescue claimed to cover "YAML not available",
# but LoadError is not a StandardError and would have propagated —
# it is now rescued explicitly. The requires are also hoisted out of
# the per-file loop, and "date" is required because Date is named in
# permitted_classes below.
#
# @param base_dir [String]
# @return [Hash]
def load_yaml_files(base_dir)
  begin
    require "yaml"
    require "date"
  rescue LoadError
    return {}
  end

  candidates = %w[config.yaml config.yml]
  candidates += ["config.#{@env}.yaml", "config.#{@env}.yml"] if @env

  merged = {}
  candidates.each do |filename|
    full = File.join(base_dir, filename)
    next unless File.exist?(full)

    begin
      data = YAML.safe_load(File.read(full), permitted_classes: [Date, Time])
      merged = Merger.deep_merge(merged, data) if data.is_a?(Hash)
    rescue StandardError
      # Unreadable or invalid YAML — skip this file.
    end
  end
  merged
end
302
+
303
# Merge config.json (and config.<env>.json when an env is set) from
# base_dir; later candidates win via deep merge.
def load_json_files(base_dir)
  names = ["config.json"]
  names << "config.#{@env}.json" if @env

  names.reduce({}) do |acc, name|
    path = File.join(base_dir, name)
    next acc unless File.exist?(path)

    parsed = JSON.parse(File.read(path))
    parsed.is_a?(Hash) ? Merger.deep_merge(acc, parsed) : acc
  end
end
317
+
318
# Import environment variables as a config layer.
#
# With a prefix, only PREFIX_* vars are taken and the remainder of
# each name is split on "_" into nested keys. Without a prefix, vars
# are imported flat — either all of them (@allow_all_env_vars) or
# everything not matched by the system blocklist / prefix list.
def load_env_vars
  pfx = "#{@prefix.upcase}_" if @prefix

  ENV.each_with_object({}) do |(key, value), result|
    if pfx
      next unless key.start_with?(pfx)

      set_nested(result, key[pfx.length..].downcase, Coercion.coerce(value))
    else
      unless @allow_all_env_vars
        # Filter out system env vars.
        next if SYSTEM_ENV_BLOCKLIST.include?(key)
        next if SYSTEM_PREFIXES.any? { |p| key.start_with?(p) }
      end

      result[key.downcase] = Coercion.coerce(value)
    end
  end
end
340
+
341
# Assign +value+ into +data+ at the nested path obtained by splitting
# +key+ on "_" (e.g. "db_host" => data["db"]["host"] = value).
#
# If an intermediate node already holds a non-Hash value (e.g.
# APP_A=1 was imported before APP_A_B=2), it is replaced with a new
# Hash — the previous version raised TypeError in that case because
# it indexed into the scalar.
#
# @param data [Hash] mutated in place
# @param key [String] underscore-separated path
# @param value [Object]
def set_nested(data, key, value)
  parts = key.split("_")
  current = data

  parts[0..-2].each do |part|
    current[part] = {} unless current[part].is_a?(Hash)
    current = current[part]
  end

  current[parts.last] = value
end
352
+
353
# Dispatch a file to the right parser based on its extension.
# Unknown extensions fall back to the dotenv parser; note that a
# file literally named ".env" has an empty File.extname and reaches
# the fallback, so it is parsed as dotenv either way.
def parse_file_by_extension(full_path)
  case File.extname(full_path).downcase
  when ".yaml", ".yml"
    require "yaml"
    parsed = YAML.safe_load(File.read(full_path), permitted_classes: [Date, Time])
    parsed.is_a?(Hash) ? parsed : {}
  when ".json"
    parsed = JSON.parse(File.read(full_path))
    parsed.is_a?(Hash) ? parsed : {}
  when ".toml"
    load_toml(full_path)
  else
    # ".env" and anything unrecognized: try dotenv syntax.
    parse_dotenv(full_path)
  end
end
372
+
373
# Parse a TOML file via the optional toml-rb gem; contributes an
# empty Hash when the gem is not installed.
def load_toml(full_path)
  require "toml-rb"
  TomlRB.parse(File.read(full_path))
rescue LoadError
  # toml-rb not available — skip TOML support.
  {}
end
382
+
383
# Recursively decrypt encrypted leaf values in place.
#
# Walks nested Hashes; any value that Encryption.encrypted? flags is
# decrypted with +key_hex+ and re-coerced via Coercion.coerce.
# NOTE(review): Arrays are not traversed, so encrypted values inside
# an array are left untouched — confirm whether that is intended.
#
# @param data [Hash] mutated in place
# @param key_hex [String] encryption key material
def decrypt_recursive!(data, key_hex)
  data.each do |k, v|
    if v.is_a?(Hash)
      decrypt_recursive!(v, key_hex)
    elsif Encryption.encrypted?(v)
      data[k] = Coercion.coerce(Encryption.decrypt_value(v, key_hex))
    end
  end
end
392
+
393
# Gather every existing config file the watcher should observe: the
# default candidates in base_dir plus any explicitly listed files,
# de-duplicated.
def collect_watched_files(base_dir)
  candidates = %w[.env config.yaml config.yml config.json]
  candidates << ".env.#{@env}" if @env
  candidates << ".env.local"
  candidates += ["config.#{@env}.yaml", "config.#{@env}.yml", "config.#{@env}.json"] if @env

  present = candidates.filter_map do |name|
    path = File.join(base_dir, name)
    path if File.exist?(path)
  end

  explicit = (@files || []).filter_map do |f|
    path = File.expand_path(f, base_dir)
    path if File.exist?(path)
  end

  (present + explicit).uniq
end
414
+
415
# Options hash for building a fresh Loader during watcher reloads.
#
# Mirrors every constructor option except watching: :watch is forced
# off so a reloaded loader never spawns a second watcher. The
# previous version dropped :debug, :custom_sources and :debounce_ms,
# so a reloaded loader silently diverged from the configured one —
# they are now carried through.
#
# @return [Hash] keyword options for Loader.new
def reload_options
  {
    files: @files, prefix: @prefix, defaults: @defaults,
    sources: @sources, env: @env, schema: @schema, strict: @strict,
    interpolate_vars: @interpolate_vars, overrides: @overrides,
    find_up: @find_up, root_markers: @root_markers, cwd: @cwd,
    allow_all_env_vars: @allow_all_env_vars, debug: @debug,
    custom_sources: @custom_sources, debounce_ms: @debounce_ms,
    watch: false
  }
end
424
+ end
425
+ end
@@ -0,0 +1,79 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Dotlyte
4
+ # Sensitive value masking for DOTLYTE v2.
5
# Sensitive value masking for DOTLYTE v2.
#
# Detects likely-secret keys by name pattern and redacts their values
# for display/logging.
module Masking
  require "set" # Set is used below and this file does not otherwise load it

  REDACTED = "[REDACTED]"

  # Key-name patterns that mark a value as sensitive.
  SENSITIVE_PATTERNS = [
    /password/i, /secret/i, /token/i, /api[_-]?key/i,
    /private[_-]?key/i, /access[_-]?key/i, /auth/i,
    /credential/i, /connection[_-]?string/i, /dsn/i,
    /encryption[_-]?key/i, /signing[_-]?key/i, /certificate/i
  ].freeze

  # Build the set of sensitive keys (auto-detected + schema).
  #
  # @param data [Hash] the config data
  # @param schema_keys [Array<String>] keys marked sensitive in schema
  # @return [Set<String>] dotted key paths (e.g. "db.password")
  def self.build_sensitive_set(data, schema_keys = [])
    set = Set.new(schema_keys)

    flatten_keys(data).each do |key|
      set.add(key) if SENSITIVE_PATTERNS.any? { |pat| pat.match?(key) }
    end

    set
  end

  # Redact sensitive values in a deep hash.
  #
  # @param data [Hash]
  # @param sensitive_keys [Set<String>] dotted key paths
  # @param prefix [String] internal recursion accumulator
  # @return [Hash] copy with redacted values
  def self.redact(data, sensitive_keys, prefix = "")
    data.each_with_object({}) do |(key, value), result|
      full_key = prefix.empty? ? key.to_s : "#{prefix}.#{key}"

      result[key] = if sensitive_keys.include?(full_key)
                      REDACTED
                    elsif value.is_a?(Hash)
                      redact(value, sensitive_keys, full_key)
                    else
                      value
                    end
    end
  end

  # Partially show a value: first 2 chars visible, rest masked.
  # Values of 4 chars or fewer are fully masked; nil becomes REDACTED.
  # Non-string values are stringified first — the previous version
  # raised NoMethodError (#length) on e.g. Integers.
  def self.format_redacted(value)
    return REDACTED if value.nil?

    str = value.to_s
    return "*" * str.length if str.length <= 4

    str[0..1] + "*" * (str.length - 2)
  end

  class << self
    private

    # Depth-first flatten of nested hash keys into dotted paths.
    def flatten_keys(data, prefix = "", out = [])
      data.each do |key, value|
        full_key = prefix.empty? ? key.to_s : "#{prefix}.#{key}"
        if value.is_a?(Hash)
          flatten_keys(value, full_key, out)
        else
          out << full_key
        end
      end
      out
    end
  end
end
79
+ end
@@ -0,0 +1,21 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Dotlyte
4
+ # Deep merge utility.
5
# Deep merge utility.
module Merger
  # Deep merge two hashes. On conflicting keys, values from
  # +override+ win, except that two Hash values are merged
  # recursively. Neither input is mutated.
  #
  # @param base [Hash]
  # @param override [Hash]
  # @return [Hash]
  def self.deep_merge(base, override)
    base.merge(override) do |_key, old_value, new_value|
      if old_value.is_a?(Hash) && new_value.is_a?(Hash)
        deep_merge(old_value, new_value)
      else
        new_value
      end
    end
  end
end
21
+ end