t-ruby 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +221 -0
- data/bin/trc +6 -0
- data/lib/t_ruby/benchmark.rb +592 -0
- data/lib/t_ruby/bundler_integration.rb +569 -0
- data/lib/t_ruby/cache.rb +774 -0
- data/lib/t_ruby/cli.rb +106 -0
- data/lib/t_ruby/compiler.rb +299 -0
- data/lib/t_ruby/config.rb +53 -0
- data/lib/t_ruby/constraint_checker.rb +441 -0
- data/lib/t_ruby/declaration_generator.rb +298 -0
- data/lib/t_ruby/doc_generator.rb +474 -0
- data/lib/t_ruby/error_handler.rb +132 -0
- data/lib/t_ruby/generic_type_parser.rb +68 -0
- data/lib/t_ruby/intersection_type_parser.rb +30 -0
- data/lib/t_ruby/ir.rb +1301 -0
- data/lib/t_ruby/lsp_server.rb +994 -0
- data/lib/t_ruby/package_manager.rb +735 -0
- data/lib/t_ruby/parser.rb +245 -0
- data/lib/t_ruby/parser_combinator.rb +942 -0
- data/lib/t_ruby/rbs_generator.rb +71 -0
- data/lib/t_ruby/runtime_validator.rb +367 -0
- data/lib/t_ruby/smt_solver.rb +1076 -0
- data/lib/t_ruby/type_alias_registry.rb +102 -0
- data/lib/t_ruby/type_checker.rb +770 -0
- data/lib/t_ruby/type_erasure.rb +26 -0
- data/lib/t_ruby/type_inferencer.rb +580 -0
- data/lib/t_ruby/union_type_parser.rb +38 -0
- data/lib/t_ruby/version.rb +5 -0
- data/lib/t_ruby/watcher.rb +320 -0
- data/lib/t_ruby.rb +42 -0
- metadata +87 -0
data/lib/t_ruby/cache.rb
ADDED
|
@@ -0,0 +1,774 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "digest"
|
|
4
|
+
require "json"
|
|
5
|
+
require "fileutils"
|
|
6
|
+
|
|
7
|
+
module TRuby
|
|
8
|
+
# Cache entry with metadata
#
# Wraps a cached value together with bookkeeping: creation time, last
# access time, and a hit counter.
class CacheEntry
  attr_reader :key, :value, :created_at, :accessed_at, :hits

  def initialize(key, value)
    @key = key
    @value = value
    @created_at = Time.now
    @accessed_at = Time.now
    @hits = 0
  end

  # Record a read: bump the hit counter, refresh the access timestamp,
  # and return the stored value.
  def access
    @hits += 1
    @accessed_at = Time.now
    @value
  end

  # True when the entry was created more than +max_age+ seconds ago.
  def stale?(max_age)
    (Time.now - @created_at) > max_age
  end

  # Serializable snapshot (note: the access timestamp is not included).
  def to_h
    {
      key: @key,
      value: @value,
      created_at: @created_at.to_i,
      hits: @hits
    }
  end
end
|
|
39
|
+
|
|
40
|
+
# In-memory LRU cache
#
# Bounded, thread-safe cache of CacheEntry objects. When full, the
# least-recently-used key is evicted. All public readers take the
# mutex: previously size/hits/misses/hit_rate/stats read shared state
# without synchronization while writers mutated it under the lock.
class MemoryCache
  attr_reader :max_size

  def initialize(max_size: 1000)
    @max_size = max_size
    @cache = {}          # key => CacheEntry
    @access_order = []   # keys, oldest access first
    @hits = 0
    @misses = 0
    @mutex = Mutex.new
  end

  # Fetch a value; returns nil on a miss. A hit refreshes the key's
  # LRU position and the entry's access metadata.
  def get(key)
    @mutex.synchronize do
      if @cache.key?(key)
        @hits += 1
        touch(key)
        @cache[key].access
      else
        @misses += 1
        nil
      end
    end
  end

  # Store +value+ under +key+, evicting the LRU entry first when the
  # cache is full and +key+ is new. Returns +value+.
  def set(key, value)
    @mutex.synchronize do
      evict if @cache.size >= @max_size && !@cache.key?(key)

      @cache[key] = CacheEntry.new(key, value)
      touch(key)
      value
    end
  end

  # Remove +key+ from the cache and its LRU bookkeeping.
  def delete(key)
    @mutex.synchronize do
      @cache.delete(key)
      @access_order.delete(key)
    end
  end

  # Empty the cache and reset the hit/miss counters.
  def clear
    @mutex.synchronize do
      @cache.clear
      @access_order.clear
      @hits = 0
      @misses = 0
    end
  end

  def size
    @mutex.synchronize { @cache.size }
  end

  def hits
    @mutex.synchronize { @hits }
  end

  def misses
    @mutex.synchronize { @misses }
  end

  # Fraction of lookups that were hits (0.0 when nothing was looked up).
  def hit_rate
    @mutex.synchronize { hit_rate_locked }
  end

  # Snapshot of size and counters, taken atomically.
  def stats
    @mutex.synchronize do
      {
        size: @cache.size,
        max_size: @max_size,
        hits: @hits,
        misses: @misses,
        hit_rate: hit_rate_locked
      }
    end
  end

  private

  # Hit ratio; caller must already hold @mutex (Ruby's Mutex is not
  # reentrant, so public readers cannot call each other).
  def hit_rate_locked
    total = @hits + @misses
    return 0.0 if total.zero?

    @hits.to_f / total
  end

  # Move +key+ to the most-recently-used position.
  def touch(key)
    @access_order.delete(key)
    @access_order.push(key)
  end

  # Drop the least-recently-used entry.
  def evict
    return if @access_order.empty?

    oldest_key = @access_order.shift
    @cache.delete(oldest_key)
  end
end
|
|
127
|
+
|
|
128
|
+
# File-based persistent cache
#
# Stores JSON-serialized values as individual files under +cache_dir+,
# named by a truncated SHA-256 of the key. Entries older than +max_age+
# seconds are treated as expired and deleted lazily.
class FileCache
  attr_reader :cache_dir, :max_age

  def initialize(cache_dir: ".t-ruby-cache", max_age: 3600)
    @cache_dir = cache_dir
    @max_age = max_age
    FileUtils.mkdir_p(@cache_dir)
  end

  # Fetch a value; returns nil (removing the file) when the entry is
  # expired or contains invalid JSON. Hash keys come back as symbols.
  def get(key)
    path = cache_path(key)
    return nil unless File.exist?(path)

    # Expired entries are deleted on read.
    if File.mtime(path) < Time.now - @max_age
      File.delete(path)
      return nil
    end

    JSON.parse(File.read(path), symbolize_names: true)
  rescue JSON::ParserError
    # Corrupt cache file: discard it and report a miss.
    File.delete(path)
    nil
  end

  # Persist +value+ as JSON and return it.
  def set(key, value)
    File.write(cache_path(key), JSON.generate(value))
    value
  end

  # Remove the cache file for +key+, if present.
  def delete(key)
    path = cache_path(key)
    File.delete(path) if File.exist?(path)
  end

  # Wipe the whole cache directory and recreate it empty.
  def clear
    FileUtils.rm_rf(@cache_dir)
    FileUtils.mkdir_p(@cache_dir)
  end

  # Delete every cache file older than +max_age+.
  def prune
    Dir.glob(File.join(@cache_dir, "*.json")).each do |path|
      File.delete(path) if File.mtime(path) < Time.now - @max_age
    end
  end

  private

  # Path for +key+: 16 hex chars of its SHA-256, with a .json suffix.
  def cache_path(key)
    digest = Digest::SHA256.hexdigest(key.to_s)[0, 16]
    File.join(@cache_dir, "#{digest}.json")
  end
end
|
|
184
|
+
|
|
185
|
+
# AST parse tree cache
#
# Two-tier cache for parse results, keyed by the SHA-256 digest of the
# source text: a fast in-memory LRU tier in front of an optional
# persistent file tier.
class ParseCache
  def initialize(memory_cache: nil, file_cache: nil)
    @memory_cache = memory_cache || MemoryCache.new(max_size: 500)
    @file_cache = file_cache
  end

  # Cached parse result for +source+, or nil. A file-tier hit is
  # promoted into the memory tier.
  def get(source)
    key = source_key(source)

    in_memory = @memory_cache.get(key)
    return in_memory if in_memory

    return nil unless @file_cache

    on_disk = @file_cache.get(key)
    @memory_cache.set(key, on_disk) if on_disk
    on_disk
  end

  # Store +parse_result+ in both tiers and return it.
  def set(source, parse_result)
    key = source_key(source)
    @memory_cache.set(key, parse_result)
    @file_cache&.set(key, parse_result)
    parse_result
  end

  # Drop any cached result for +source+ from both tiers.
  def invalidate(source)
    key = source_key(source)
    @memory_cache.delete(key)
    @file_cache&.delete(key)
  end

  # Statistics of the memory tier only.
  def stats
    @memory_cache.stats
  end

  private

  # Content-addressed key for a piece of source text.
  def source_key(source)
    Digest::SHA256.hexdigest(source)
  end
end
|
|
236
|
+
|
|
237
|
+
# Type resolution cache
#
# Memoizes resolved types keyed by their source-level type expression,
# delegating storage to a bounded in-memory LRU cache.
class TypeResolutionCache
  def initialize
    @cache = MemoryCache.new(max_size: 2000)
  end

  # Previously resolved type for +type_expression+, or nil.
  def get(type_expression)
    @cache.get(type_expression)
  end

  # Memoize +resolved_type+ under +type_expression+ and return it.
  def set(type_expression, resolved_type)
    @cache.set(type_expression, resolved_type)
  end

  # Forget all memoized resolutions.
  def clear
    @cache.clear
  end

  # Statistics of the underlying cache.
  def stats
    @cache.stats
  end
end
|
|
259
|
+
|
|
260
|
+
# Declaration file cache
#
# Caches generated declarations per source file. Keys combine the file
# path with its mtime, so editing a file automatically invalidates the
# stale entry. A small memory tier sits in front of a 24-hour file tier.
class DeclarationCache
  def initialize(cache_dir: ".t-ruby-cache/declarations")
    @file_cache = FileCache.new(cache_dir: cache_dir, max_age: 86400) # 24 hours
    @memory_cache = MemoryCache.new(max_size: 200)
  end

  # Cached declarations for the current version of +file_path+, or nil
  # when the file is missing or no fresh entry exists. File-tier hits
  # are promoted into the memory tier.
  def get(file_path)
    return nil unless File.exist?(file_path)

    key = key_for(file_path)

    in_memory = @memory_cache.get(key)
    return in_memory if in_memory

    persisted = @file_cache.get(key)
    @memory_cache.set(key, persisted) if persisted
    persisted
  end

  # Store +declarations+ for the current version of +file_path+ in both
  # tiers and return them. Raises Errno::ENOENT if the file is missing.
  def set(file_path, declarations)
    key = key_for(file_path)

    @memory_cache.set(key, declarations)
    @file_cache.set(key, declarations)

    declarations
  end

  # Empty both tiers.
  def clear
    @memory_cache.clear
    @file_cache.clear
  end

  private

  # Cache key tied to the file's current modification time.
  def key_for(file_path)
    "#{file_path}:#{File.mtime(file_path).to_i}"
  end
end
|
|
303
|
+
|
|
304
|
+
# Incremental compilation support
#
# Wraps a compiler object (anything responding to #compile(file_path))
# and skips recompilation of files whose content hash and transitive
# dependencies are unchanged since the last run.
class IncrementalCompiler
  attr_reader :file_hashes, :dependencies

  def initialize(compiler, cache: nil)
    @compiler = compiler
    @cache = cache || ParseCache.new
    @file_hashes = {}      # file_path => SHA-256 of last compiled content
    @dependencies = {}     # file_path => [files it depends on]
    @compiled_files = {}   # file_path => last compile result
  end

  # Check if file needs recompilation: true when the file is missing,
  # its content hash changed, or any transitive dependency needs a
  # rebuild. +_seen+ tracks files already visited so dependency cycles
  # terminate instead of recursing forever (the previous implementation
  # had no cycle guard and would overflow the stack on a->b->a deps).
  def needs_compile?(file_path, _seen = {})
    return true unless File.exist?(file_path)

    current_hash = file_hash(file_path)
    stored_hash = @file_hashes[file_path]
    return true if stored_hash.nil? || stored_hash != current_hash

    # Cycle guard: a file already on this traversal contributes nothing new.
    return false if _seen[file_path]

    _seen[file_path] = true
    deps = @dependencies[file_path] || []
    deps.any? { |dep| needs_compile?(dep, _seen) }
  end

  # Compile file with caching: returns the memoized result when the
  # file is unchanged, otherwise recompiles and records the new hash.
  def compile(file_path)
    return @compiled_files[file_path] unless needs_compile?(file_path)

    result = @compiler.compile(file_path)
    @file_hashes[file_path] = file_hash(file_path)
    @compiled_files[file_path] = result

    result
  end

  # Compile multiple files, skipping unchanged ones. Returns a hash of
  # file_path => result for the files that were actually recompiled.
  def compile_all(file_paths)
    results = {}
    to_compile = file_paths.select { |f| needs_compile?(f) }

    to_compile.each do |file_path|
      results[file_path] = compile(file_path)
    end

    results
  end

  # Register that +file_path+ depends on +depends_on+ (idempotent).
  def add_dependency(file_path, depends_on)
    @dependencies[file_path] ||= []
    @dependencies[file_path] << depends_on unless @dependencies[file_path].include?(depends_on)
  end

  # Clear all compilation state (hashes, dependencies, memoized results).
  def clear
    @file_hashes.clear
    @dependencies.clear
    @compiled_files.clear
    # NOTE: a no-op `@cache.stats` call used to live here; it had no
    # effect and was removed.
  end

  private

  # SHA-256 of the file's current content, or nil if it does not exist.
  def file_hash(file_path)
    return nil unless File.exist?(file_path)
    Digest::SHA256.hexdigest(File.read(file_path))
  end
end
|
|
374
|
+
|
|
375
|
+
# Parallel file processor
#
# Runs a block over many files using a bounded number of threads. On
# MRI, threads provide I/O concurrency rather than CPU parallelism.
class ParallelProcessor
  attr_reader :thread_count

  def initialize(thread_count: nil)
    @thread_count = thread_count || determine_thread_count
  end

  # Process files in parallel, one thread per batch. Result order is
  # not guaranteed. Batch size uses ceiling division so at most
  # +thread_count+ threads are spawned (the previous floor division
  # could create more batches — and thus more threads — than the
  # configured limit, e.g. 10 files / 4 threads => 5 batches).
  def process_files(file_paths, &block)
    return [] if file_paths.empty?

    batches = file_paths.each_slice(batch_size(file_paths.length)).to_a

    results = []
    mutex = Mutex.new

    threads = batches.map do |batch|
      Thread.new do
        batch_results = batch.map { |file| block.call(file) }
        mutex.synchronize { results.concat(batch_results) }
      end
    end

    threads.each(&:join)
    results
  end

  # Process with work stealing: +thread_count+ workers pull files from
  # a shared queue until it is drained. Result order is not guaranteed.
  def process_with_queue(file_paths, &block)
    queue = Queue.new
    file_paths.each { |f| queue << f }

    results = []
    mutex = Mutex.new

    threads = @thread_count.times.map do
      Thread.new do
        loop do
          # Non-blocking pop raises ThreadError when the queue is empty;
          # that is our termination signal (was a bare inline rescue).
          file = begin
            queue.pop(true)
          rescue ThreadError
            break
          end
          result = block.call(file)
          mutex.synchronize { results << result }
        end
      end
    end

    threads.each(&:join)
    results
  end

  private

  # Number of worker threads: CPU core count capped at 8, falling back
  # to 4 when it cannot be determined. `etc` is required lazily: it was
  # previously used without being loaded, so the rescue silently forced
  # the fallback on hosts that had not loaded it elsewhere.
  def determine_thread_count
    require "etc"
    [Etc.nprocessors, 8].min
  rescue StandardError
    4
  end

  # Ceiling division so batches never outnumber @thread_count.
  def batch_size(total)
    [(total + @thread_count - 1) / @thread_count, 1].max
  end
end
|
|
439
|
+
|
|
440
|
+
# Cross-file Type Checker
#
# Collects type, function, and interface declarations from compiled
# files and checks global consistency: duplicate names, unresolved type
# references, and (eventually) interface implementations.
class CrossFileTypeChecker
  attr_reader :errors, :warnings, :file_types

  # Built-in types that always resolve.
  BUILTIN_TYPES = %w[String Integer Float Boolean Array Hash Symbol void nil Object Numeric Enumerable].freeze

  def initialize(type_checker: nil)
    @type_checker = type_checker || TypeChecker.new
    @file_types = {} # file_path => { types: [], functions: [], interfaces: [] }
    @global_registry = {} # name => { file: path, kind: :type/:func/:interface, definition: ... }
    @file_duplicates = {} # file_path => [names declared more than once in that file]
    @errors = []
    @warnings = []
  end

  # Register types from a file. Re-registering the same file replaces
  # its previous entry. Same-file duplicate names are recorded here:
  # the global registry is keyed by name, so duplicates would otherwise
  # be silently overwritten and never reported (the old duplicate check
  # scanned the registry's unique keys and could never fire).
  def register_file(file_path, ir_program)
    types = []
    functions = []
    interfaces = []
    declared_names = []

    ir_program.declarations.each do |decl|
      case decl
      when IR::TypeAlias
        types << { name: decl.name, definition: decl.definition }
        declared_names << decl.name
        register_global(decl.name, file_path, :type, decl)
      when IR::Interface
        interfaces << { name: decl.name, members: decl.members }
        declared_names << decl.name
        register_global(decl.name, file_path, :interface, decl)
      when IR::MethodDef
        functions << { name: decl.name, params: decl.params, return_type: decl.return_type }
        declared_names << decl.name
        register_global(decl.name, file_path, :function, decl)
      end
    end

    @file_duplicates[file_path] = declared_names.select { |n| declared_names.count(n) > 1 }.uniq
    @file_types[file_path] = { types: types, functions: functions, interfaces: interfaces }
  end

  # Check cross-file type consistency. Errors are rebuilt on each call;
  # warnings accumulated during registration (e.g. a name defined in
  # multiple files) are preserved so they actually appear in the result
  # (previously they were reset here and always came back empty).
  def check_all
    @errors = []

    # Check for duplicate definitions
    check_duplicate_definitions

    # Check for unresolved type references
    check_unresolved_references

    # Check interface implementations
    check_interface_implementations

    {
      success: @errors.empty?,
      errors: @errors,
      warnings: @warnings
    }
  end

  # Check a specific file's parameter and return types against the
  # global registry. Returns an array of error hashes (does not mutate
  # @errors).
  def check_file(file_path, ir_program)
    file_errors = []

    ir_program.declarations.each do |decl|
      case decl
      when IR::MethodDef
        # Check parameter types
        decl.params.each do |param|
          if param.type_annotation
            unless type_exists?(param.type_annotation)
              file_errors << {
                file: file_path,
                message: "Unknown type '#{type_name(param.type_annotation)}' in parameter '#{param.name}'"
              }
            end
          end
        end

        # Check return type
        if decl.return_type
          unless type_exists?(decl.return_type)
            file_errors << {
              file: file_path,
              message: "Unknown return type '#{type_name(decl.return_type)}' in function '#{decl.name}'"
            }
          end
        end
      end
    end

    file_errors
  end

  # Names of all registered types/functions/interfaces.
  def all_types
    @global_registry.keys
  end

  # Registry entry for +name+ (file, kind, definition), or nil.
  def find_definition(name)
    @global_registry[name]
  end

  # Clear all registrations, duplicates, errors, and warnings.
  def clear
    @file_types.clear
    @global_registry.clear
    @file_duplicates.clear
    @errors.clear
    @warnings.clear
  end

  private

  # Record +name+ in the global registry. A redefinition coming from a
  # *different* file is downgraded to a warning (last writer wins).
  def register_global(name, file_path, kind, definition)
    if @global_registry[name] && @global_registry[name][:file] != file_path
      # Duplicate definition from different file
      @warnings << {
        message: "#{kind.to_s.capitalize} '#{name}' defined in multiple files",
        files: [@global_registry[name][:file], file_path]
      }
    end

    @global_registry[name] = { file: file_path, kind: kind, definition: definition }
  end

  # Emit an error for every name declared more than once within a
  # single file, as recorded at registration time.
  def check_duplicate_definitions
    @file_duplicates.each do |file, names|
      names.each do |name|
        @errors << {
          file: file,
          message: "Duplicate definition of '#{name}'"
        }
      end
    end
  end

  # Emit an error for every type alias whose definition references a
  # name that resolves neither to a builtin nor to a registered type.
  def check_unresolved_references
    @file_types.each do |file_path, info|
      info[:types].each do |type_info|
        referenced_types = extract_type_references(type_info[:definition])
        referenced_types.each do |ref|
          unless type_exists_by_name?(ref)
            @errors << {
              file: file_path,
              message: "Unresolved type reference '#{ref}' in type alias '#{type_info[:name]}'"
            }
          end
        end
      end
    end
  end

  def check_interface_implementations
    # For future: check that classes implement all interface methods
  end

  # Recursively test whether every name in a type node resolves.
  # Unknown node kinds are assumed valid.
  def type_exists?(type_node)
    case type_node
    when IR::SimpleType
      type_exists_by_name?(type_node.name)
    when IR::GenericType
      type_exists_by_name?(type_node.base)
    when IR::UnionType, IR::IntersectionType
      type_node.types.all? { |t| type_exists?(t) }
    when IR::NullableType
      type_exists?(type_node.inner_type)
    else
      true # Assume valid for unknown types
    end
  end

  # True for builtins and globally registered names.
  def type_exists_by_name?(name)
    BUILTIN_TYPES.include?(name) || !@global_registry[name].nil?
  end

  # Human-readable name for a type node, used in error messages.
  def type_name(type_node)
    case type_node
    when IR::SimpleType
      type_node.name
    when IR::GenericType
      "#{type_node.base}<...>"
    else
      type_node.to_s
    end
  end

  # All type names referenced (recursively) by a type definition node.
  def extract_type_references(definition)
    return [] unless definition

    case definition
    when IR::SimpleType
      [definition.name]
    when IR::GenericType
      [definition.base] + definition.type_args.flat_map { |t| extract_type_references(t) }
    when IR::UnionType, IR::IntersectionType
      definition.types.flat_map { |t| extract_type_references(t) }
    when IR::NullableType
      extract_type_references(definition.inner_type)
    else
      []
    end
  end
end
|
|
651
|
+
|
|
652
|
+
# Enhanced Incremental Compiler with IR and Cross-file support
#
# Extends IncrementalCompiler with a per-file IR cache and optional
# cross-file type checking. The wrapped compiler must additionally
# respond to #compile_to_ir(file_path).
#
# NOTE: the previous version redefined a private #file_hash that was
# byte-identical to the inherited IncrementalCompiler#file_hash; the
# redundant override has been removed.
class EnhancedIncrementalCompiler < IncrementalCompiler
  attr_reader :cross_file_checker, :ir_cache

  def initialize(compiler, cache: nil, enable_cross_file: true)
    super(compiler, cache: cache)
    @ir_cache = {} # file_path => IR::Program
    @cross_file_checker = CrossFileTypeChecker.new if enable_cross_file
  end

  # Compile with IR caching. Returns the memoized result when the file
  # (and its dependencies) are unchanged.
  def compile_with_ir(file_path)
    return @compiled_files[file_path] unless needs_compile?(file_path)

    # Lower to IR first so the cross-file checker sees this file's
    # declarations before the final compile pass.
    ir_program = @compiler.compile_to_ir(file_path)
    @ir_cache[file_path] = ir_program

    # Register with cross-file checker (disabled when nil).
    @cross_file_checker&.register_file(file_path, ir_program)

    # Compile from IR
    result = @compiler.compile(file_path)
    @file_hashes[file_path] = file_hash(file_path)
    @compiled_files[file_path] = result

    result
  end

  # Compile all files (collecting per-file failures instead of
  # aborting), then run cross-file checking over everything registered.
  def compile_all_with_checking(file_paths)
    results = {}
    errors = []

    # First pass: compile and register all files
    file_paths.each do |file_path|
      begin
        results[file_path] = compile_with_ir(file_path)
      rescue => e
        errors << { file: file_path, error: e.message }
      end
    end

    # Second pass: cross-file type checking
    if @cross_file_checker
      check_result = @cross_file_checker.check_all
      errors.concat(check_result[:errors])
    end

    {
      results: results,
      errors: errors,
      success: errors.empty?
    }
  end

  # Cached IR for a file, or nil if it has not been compiled via
  # #compile_with_ir.
  def get_ir(file_path)
    @ir_cache[file_path]
  end

  # Clear all caches, including the IR cache and the checker's state.
  def clear
    super
    @ir_cache.clear
    @cross_file_checker&.clear
  end
end
|
|
727
|
+
|
|
728
|
+
# Compilation profiler
#
# Accumulates monotonic wall-clock time and call counts for named
# compilation phases.
class CompilationProfiler
  def initialize
    @timings = {}
    @call_counts = {}
  end

  # Time one invocation of phase +name+, returning the block's result.
  # Uses the monotonic clock so measurements survive wall-clock jumps.
  def profile(name, &block)
    started_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    outcome = block.call
    duration = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started_at

    @timings[name] = (@timings[name] || 0.0) + duration
    @call_counts[name] = (@call_counts[name] || 0) + 1

    outcome
  end

  # Print phases to stdout, slowest total time first.
  def report
    puts "=== Compilation Profile ==="
    @timings.sort_by { |_, total| -total }.each do |name, total|
      calls = @call_counts[name]
      average = total / calls
      puts "#{name}: #{format('%.3f', total)}s total, #{calls} calls, #{format('%.3f', average * 1000)}ms avg"
    end
  end

  # Forget all recorded timings and counts.
  def reset
    @timings.clear
    @call_counts.clear
  end

  # Per-phase stats as an array of hashes (name, total_time,
  # call_count, avg_time).
  def to_h
    @timings.map do |name, total|
      {
        name: name,
        total_time: total,
        call_count: @call_counts[name],
        avg_time: total / @call_counts[name]
      }
    end
  end
end
|
|
774
|
+
end
|