mbeditor 0.4.5 → 0.5.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions exactly as they appear in their public registry.
@@ -4,6 +4,7 @@ require "fileutils"
4
4
  require "open3"
5
5
  require "shellwords"
6
6
  require "tempfile"
7
+ require "timeout"
7
8
  require "tmpdir"
8
9
 
9
10
  module Mbeditor
@@ -45,7 +46,12 @@ module Mbeditor
45
46
 
46
47
  # GET /mbeditor/files — recursive file tree
47
48
  def files
48
- tree = build_tree(workspace_root.to_s)
49
+ root = workspace_root.to_s
50
+ cached = cached_file_tree(root)
51
+ return render json: cached if cached
52
+
53
+ tree = build_tree(root)
54
+ store_file_tree(root, tree)
49
55
  render json: tree
50
56
  end
51
57
 
@@ -69,7 +75,9 @@ module Mbeditor
69
75
 
70
76
  # POST /mbeditor/state — save workspace state
71
77
  def save_state
72
- payload = params[:state].to_json
78
+ raw = params[:state]
79
+ raw = raw.to_unsafe_h if raw.respond_to?(:to_unsafe_h)
80
+ payload = raw.to_json
73
81
  return render json: { error: "State payload too large" }, status: :content_too_large if payload.bytesize > STATE_MAX_BYTES
74
82
 
75
83
  path = workspace_root.join("tmp", "mbeditor_workspace.json")
@@ -319,6 +327,77 @@ module Mbeditor
319
327
  render json: { error: e.message }, status: :unprocessable_content
320
328
  end
321
329
 
330
+ # GET /mbeditor/module_members?name=ArticlesHelper
331
+ # Returns methods defined in the workspace file that defines the named module/class.
332
+ def module_members
333
+ name = params[:name].to_s.strip
334
+ return render json: { error: "Invalid name" }, status: :bad_request \
335
+ unless name.match?(/\A[A-Z][a-zA-Z0-9_]*\z/)
336
+
337
+ file = RubyDefinitionService.module_defined_in(
338
+ workspace_root, name,
339
+ excluded_dirnames: excluded_dirnames,
340
+ excluded_paths: excluded_paths
341
+ )
342
+ return render json: { name: name, methods: [] } unless file
343
+
344
+ defs = RubyDefinitionService.defs_in_file(file)
345
+ methods = defs.flat_map do |method_name, entries|
346
+ entries.map { |e| { name: method_name, line: e[:line], signature: e[:signature], file: relative_path(file) } }
347
+ end
348
+ render json: { name: name, file: relative_path(file), methods: methods }
349
+ rescue StandardError => e
350
+ render json: { error: e.message }, status: :unprocessable_content
351
+ end
352
+
353
+ # GET /mbeditor/file_includes?path=app/models/article.rb
354
+ # Returns included/extended/prepended module names and their methods.
355
+ def file_includes
356
+ path = resolve_path(params[:path])
357
+ return render json: { error: "Forbidden" }, status: :forbidden unless path
358
+
359
+ # Ensure workspace is scanned so include_calls are populated in the cache.
360
+ # Fast no-op on subsequent calls (mtime checks only).
361
+ RubyDefinitionService.scan(workspace_root,
362
+ excluded_dirnames: excluded_dirnames,
363
+ excluded_paths: excluded_paths)
364
+
365
+ module_names = RubyDefinitionService.includes_in_file(path)
366
+ includes = module_names.filter_map do |mod_name|
367
+ mod_file = RubyDefinitionService.module_defined_in(
368
+ workspace_root, mod_name,
369
+ excluded_dirnames: excluded_dirnames,
370
+ excluded_paths: excluded_paths
371
+ )
372
+ next unless mod_file
373
+
374
+ defs = RubyDefinitionService.defs_in_file(mod_file)
375
+ methods = defs.flat_map do |method_name, entries|
376
+ entries.map { |e| { name: method_name, line: e[:line], signature: e[:signature] } }
377
+ end
378
+ { name: mod_name, file: relative_path(mod_file), methods: methods }
379
+ end
380
+ render json: { includes: includes }
381
+ rescue StandardError => e
382
+ render json: { error: e.message }, status: :unprocessable_content
383
+ end
384
+
385
+ # GET /mbeditor/unused_methods?path=app/models/article.rb
386
+ # Returns method names defined in the file that have no call-sites in the workspace.
387
+ def unused_methods
388
+ path = resolve_path(params[:path])
389
+ return render json: { error: "Forbidden" }, status: :forbidden unless path
390
+
391
+ unused = UnusedMethodsService.call(
392
+ workspace_root, path,
393
+ excluded_dirnames: excluded_dirnames,
394
+ excluded_paths: excluded_paths
395
+ )
396
+ render json: { unused: unused }
397
+ rescue StandardError => e
398
+ render json: { error: e.message }, status: :unprocessable_content
399
+ end
400
+
322
401
  # GET /mbeditor/search?q=...&offset=0&limit=50&regex=false&match_case=false&whole_word=false
323
402
  def search
324
403
  query = params[:q].to_s.strip
@@ -350,6 +429,104 @@ module Mbeditor
350
429
  render json: { error: e.message }, status: :unprocessable_content
351
430
  end
352
431
 
432
+ MAX_REPLACE_FILES = 500
433
+
434
+ # POST /mbeditor/replace_in_files
435
+ # Replaces a string/pattern across all matching files in the workspace.
436
+ # Returns { replaced_count:, files_affected:[], errors:[], partial: }
437
+ def replace_in_files
438
+ query = params[:query].to_s.strip
439
+ replacement = params[:replacement].to_s
440
+ use_regex = params[:regex] == 'true'
441
+ match_case = params[:match_case] == 'true'
442
+ whole_word = params[:whole_word] == 'true'
443
+
444
+ return render json: { error: "Query is required" }, status: :bad_request if query.blank?
445
+ return render json: { error: "Query too long" }, status: :bad_request if query.length > 500
446
+
447
+ # Collect all unique file paths that have at least one match.
448
+ # Use a large limit to get all matching files; stream_search_results handles deduplication by file internally.
449
+ raw_results = stream_search_results(query, 10_000, use_regex: use_regex, match_case: match_case, whole_word: whole_word)
450
+ file_paths = raw_results.map { |r| r[:file] }.uniq
451
+
452
+ # Fix 3: Cap the number of files to process
453
+ if file_paths.length > MAX_REPLACE_FILES
454
+ return render json: { error: "Too many files matched (#{file_paths.length}). Narrow your search." }, status: :unprocessable_entity
455
+ end
456
+
457
+ replaced_count = 0
458
+ files_affected = []
459
+ errors = []
460
+
461
+ # Build the Ruby Regexp to use for gsub
462
+ begin
463
+ pattern = if use_regex
464
+ flags = match_case ? 0 : Regexp::IGNORECASE
465
+ Regexp.new(whole_word ? "\\b(?:#{query})\\b" : query, flags)
466
+ else
467
+ flags = match_case ? 0 : Regexp::IGNORECASE
468
+ Regexp.new(whole_word ? "\\b#{Regexp.escape(query)}\\b" : Regexp.escape(query), flags)
469
+ end
470
+ rescue RegexpError => e
471
+ return render json: { error: "Invalid regex: #{e.message}" }, status: :bad_request
472
+ end
473
+
474
+ file_paths.each do |rel_path|
475
+ full_path = resolve_path(rel_path)
476
+ unless full_path
477
+ errors << { file: rel_path, error: "Forbidden" }
478
+ next
479
+ end
480
+
481
+ # Fix 2: Check path_blocked_for_operations?
482
+ if path_blocked_for_operations?(full_path)
483
+ errors << { file: rel_path, error: "Forbidden" }
484
+ next
485
+ end
486
+
487
+ unless File.file?(full_path)
488
+ errors << { file: rel_path, error: "File not found" }
489
+ next
490
+ end
491
+ if File.size(full_path) > MAX_OPEN_FILE_SIZE_BYTES
492
+ errors << { file: rel_path, error: "File too large" }
493
+ next
494
+ end
495
+
496
+ # Fix 1: Wrap per-file gsub/scan in a timeout to prevent ReDoS
497
+ begin
498
+ Timeout::timeout(5) do
499
+ content = File.binread(full_path).force_encoding("UTF-8")
500
+ replacements_in_file = content.scan(pattern).length
501
+ new_content = content.gsub(pattern, replacement)
502
+
503
+ # Fix 4: Use new_content != content instead of delta logic
504
+ if new_content != content
505
+ File.binwrite(full_path, new_content.encode("UTF-8", invalid: :replace, undef: :replace))
506
+ files_affected << rel_path
507
+ replaced_count += replacements_in_file
508
+ end
509
+ end
510
+ rescue Timeout::Error
511
+ errors << { file: rel_path, error: "Timed out processing file" }
512
+ next
513
+ rescue StandardError => e
514
+ errors << { file: rel_path, error: e.message }
515
+ next
516
+ end
517
+ end
518
+
519
+ # Fix 5: Surface partial failure
520
+ render json: {
521
+ replaced_count: replaced_count,
522
+ files_affected: files_affected,
523
+ errors: errors,
524
+ partial: errors.any? && files_affected.any?
525
+ }
526
+ rescue StandardError => e
527
+ render json: { error: e.message }, status: :unprocessable_content
528
+ end
529
+
353
530
  # GET /mbeditor/git_status
354
531
  def git_status
355
532
  output, _err, status = Open3.capture3("git", "-C", workspace_root.to_s, "status", "--porcelain")
@@ -363,63 +540,84 @@ module Mbeditor
363
540
  # GET /mbeditor/git_info
364
541
  def git_info
365
542
  repo = workspace_root.to_s
543
+ cached = cached_git_info(repo)
544
+ return render json: cached if cached
545
+
366
546
  branch = GitService.current_branch(repo)
367
547
  unless branch
368
548
  return render json: { ok: false, error: "Unable to determine current branch" }, status: :unprocessable_content
369
549
  end
370
- working_output, _err, working_status = Open3.capture3("git", "-C", repo, "status", "--porcelain")
550
+
551
+ # Wave 1: all independent git reads run in parallel
552
+ status_t = Thread.new { Open3.capture3("git", "-C", repo, "status", "--porcelain") }
553
+ numstat_t = Thread.new { Open3.capture3("git", "-C", repo, "diff", "--numstat", "HEAD") }
554
+ upstream_t = Thread.new { Open3.capture3("git", "-C", repo, "rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}") }
555
+ base_t = Thread.new { GitService.find_branch_base(repo, branch) }
556
+
557
+ working_output, _err, working_status = status_t.value
371
558
  working_tree = working_status.success? ? parse_porcelain_status(working_output) : []
372
559
 
373
- # Annotate each working-tree file with added/removed line counts
374
- numstat_out, = Open3.capture3("git", "-C", repo, "diff", "--numstat", "HEAD")
375
- numstat_map = GitService.parse_numstat(numstat_out)
560
+ numstat_out = numstat_t.value.first
561
+ numstat_map = GitService.parse_numstat(numstat_out)
376
562
  working_tree = working_tree.map { |f| f.merge(numstat_map.fetch(f[:path], {})) }
377
563
 
378
- upstream_output, _err, upstream_status = Open3.capture3("git", "-C", repo, "rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}")
564
+ upstream_output, _err, upstream_status = upstream_t.value
379
565
  upstream_branch = upstream_status.success? ? upstream_output.strip : nil
380
566
  upstream_branch = nil unless upstream_branch&.match?(%r{\A[\w./-]+\z})
381
567
 
382
- ahead_count = 0
383
- behind_count = 0
568
+ # Determine the branch's fork point relative to a base branch (develop/main/master).
569
+ # This ensures History and Changes only show work unique to this branch.
570
+ base_sha, base_ref = base_t.value
571
+
572
+ ahead_count = 0
573
+ behind_count = 0
384
574
  unpushed_files = []
385
575
  unpushed_commits = []
576
+ diff_base = base_sha || upstream_branch
386
577
 
387
- # Determine the branch's fork point relative to a base branch (develop/main/master).
388
- # This ensures History and Changes only show work unique to this branch.
389
- base_sha, base_ref = GitService.find_branch_base(repo, branch)
578
+ # Wave 2: conditional parallel reads that depend on Wave 1 results
579
+ wave2 = {}
580
+ wave2[:counts] = Thread.new { Open3.capture3("git", "-C", repo, "rev-list", "--left-right", "--count", "HEAD...#{upstream_branch}") } if upstream_branch.present?
581
+ wave2[:unp_log] = Thread.new { Open3.capture3("git", "-C", repo, "log", "#{upstream_branch}..HEAD", "--pretty=format:%H%x1f%s%x1f%an%x1f%aI%x1e") } if upstream_branch.present?
582
+ wave2[:diff_name] = Thread.new { Open3.capture3("git", "-C", repo, "diff", "--name-status", "#{diff_base}..HEAD") } if diff_base.present?
583
+ wave2[:diff_num] = Thread.new { Open3.capture3("git", "-C", repo, "diff", "--numstat", "#{diff_base}..HEAD") } if diff_base.present?
584
+ wave2[:branch_log] = Thread.new do
585
+ if base_sha
586
+ Open3.capture3("git", "-C", repo, "log", "--first-parent", "#{base_sha}..HEAD",
587
+ "--pretty=format:%H%x1f%s%x1f%an%x1f%aI%x1e")
588
+ else
589
+ Open3.capture3("git", "-C", repo, "log", "--first-parent", branch, "-n", "100",
590
+ "--pretty=format:%H%x1f%s%x1f%an%x1f%aI%x1e")
591
+ end
592
+ end
593
+
594
+ wave2.each_value(&:join)
390
595
 
391
- if upstream_branch.present?
392
- counts_output, _err, counts_status = Open3.capture3("git", "-C", repo, "rev-list", "--left-right", "--count", "HEAD...#{upstream_branch}")
596
+ if (ct = wave2[:counts])
597
+ counts_output, _err, counts_status = ct.value
393
598
  if counts_status.success?
394
599
  ahead_str, behind_str = counts_output.strip.split("\t", 2)
395
- ahead_count = ahead_str.to_i
600
+ ahead_count = ahead_str.to_i
396
601
  behind_count = behind_str.to_i
397
602
  end
603
+ end
398
604
 
399
- unpushed_log_output, _err, unpushed_log_status = Open3.capture3("git", "-C", repo, "log", "#{upstream_branch}..HEAD", "--pretty=format:%H%x1f%s%x1f%an%x1f%aI%x1e")
605
+ if (ul = wave2[:unp_log])
606
+ unpushed_log_output, _err, unpushed_log_status = ul.value
400
607
  unpushed_commits = GitService.parse_git_log(unpushed_log_output) if unpushed_log_status.success?
401
608
  end
402
609
 
403
- # "Changes in Branch" use the merge-base against the base branch when available
404
- # so that files changed in develop (and merged into this branch) are excluded.
405
- diff_base = base_sha || upstream_branch
406
- if diff_base.present?
407
- unpushed_output, _err, unpushed_status = Open3.capture3("git", "-C", repo, "diff", "--name-status", "#{diff_base}..HEAD")
408
- if unpushed_status.success?
409
- unpushed_files = parse_name_status(unpushed_output)
410
- unp_numstat_out, = Open3.capture3("git", "-C", repo, "diff", "--numstat", "#{diff_base}..HEAD")
411
- unp_numstat_map = GitService.parse_numstat(unp_numstat_out)
412
- unpushed_files = unpushed_files.map { |f| f.merge(unp_numstat_map.fetch(f[:path], {})) }
610
+ if (dn = wave2[:diff_name]) && (dnum = wave2[:diff_num])
611
+ diff_name_out, _err, diff_name_status = dn.value
612
+ if diff_name_status.success?
613
+ unpushed_files = parse_name_status(diff_name_out)
614
+ unp_numstat_out = dnum.value.first
615
+ unp_numstat_map = GitService.parse_numstat(unp_numstat_out)
616
+ unpushed_files = unpushed_files.map { |f| f.merge(unp_numstat_map.fetch(f[:path], {})) }
413
617
  end
414
618
  end
415
619
 
416
- branch_log_output, _err, branch_log_status = if base_sha
417
- Open3.capture3("git", "-C", repo, "log", "--first-parent", "#{base_sha}..HEAD",
418
- "--pretty=format:%H%x1f%s%x1f%an%x1f%aI%x1e")
419
- else
420
- Open3.capture3("git", "-C", repo, "log", "--first-parent", branch, "-n", "100",
421
- "--pretty=format:%H%x1f%s%x1f%an%x1f%aI%x1e")
422
- end
620
+ branch_log_output, _err, branch_log_status = wave2[:branch_log].value
423
621
  branch_commits = branch_log_status.success? ? GitService.parse_git_log(branch_log_output) : []
424
622
 
425
623
  redmine_ticket_id = nil
@@ -438,7 +636,7 @@ module Mbeditor
438
636
  end
439
637
  end
440
638
 
441
- render json: {
639
+ payload = {
442
640
  ok: true,
443
641
  branch: branch,
444
642
  upstreamBranch: upstream_branch,
@@ -451,6 +649,8 @@ module Mbeditor
451
649
  branchBaseRef: base_ref,
452
650
  redmineTicketId: redmine_ticket_id
453
651
  }
652
+ store_git_info(repo, payload)
653
+ render json: payload
454
654
  rescue StandardError => e
455
655
  render json: { ok: false, error: e.message }, status: :unprocessable_content
456
656
  end
@@ -536,7 +736,7 @@ module Mbeditor
536
736
  return render json: { markers: markers }
537
737
  end
538
738
 
539
- cmd = rubocop_command + ["--no-server", "--cache", "false", "--stdin", filename, "--format", "json", "--no-color", "--force-exclusion"]
739
+ cmd = rubocop_command + ["--no-server", "--stdin", filename, "--format", "json", "--no-color", "--force-exclusion"]
540
740
  env = { 'RUBOCOP_CACHE_ROOT' => File.join(Dir.tmpdir, 'rubocop') }
541
741
  output = run_with_timeout(env, cmd, stdin_data: code)
542
742
 
@@ -597,7 +797,7 @@ module Mbeditor
597
797
  f.flush
598
798
  tmpfile = f.path
599
799
 
600
- cmd = rubocop_command + ["--no-server", "--cache", "false", "-A", "--no-color", tmpfile]
800
+ cmd = rubocop_command + ["--no-server", "-A", "--no-color", tmpfile]
601
801
  env = { 'RUBOCOP_CACHE_ROOT' => File.join(Dir.tmpdir, 'rubocop') }
602
802
  _out, _err, status = Open3.capture3(env, *cmd)
603
803
 
@@ -660,7 +860,7 @@ module Mbeditor
660
860
  f.flush
661
861
  tmpfile = f.path
662
862
 
663
- cmd = rubocop_command + ["--no-server", "--cache", "false", "-A", "--no-color", tmpfile]
863
+ cmd = rubocop_command + ["--no-server", "-A", "--no-color", tmpfile]
664
864
  env = { 'RUBOCOP_CACHE_ROOT' => File.join(Dir.tmpdir, 'rubocop') }
665
865
  _out, _err, status = Open3.capture3(env, *cmd)
666
866
  unless status.success? || status.exitstatus == 1
@@ -677,6 +877,10 @@ module Mbeditor
677
877
  private
678
878
 
679
879
  def broadcast_files_changed
880
+ root = workspace_root.to_s
881
+ invalidate_file_tree_cache(root)
882
+ invalidate_git_info_cache(root)
883
+
680
884
  return unless defined?(ActionCable.server)
681
885
 
682
886
  ActionCable.server.broadcast("mbeditor_editor", { type: "files_changed" })
@@ -838,8 +1042,6 @@ module Mbeditor
838
1042
  full = File.join(dir, name)
839
1043
  rel = relative_path(full)
840
1044
 
841
- next if excluded_path?(rel, name)
842
-
843
1045
  if File.directory?(full)
844
1046
  { name: name, type: "folder", path: rel, children: build_tree(full, depth: depth + 1) }
845
1047
  else
@@ -966,8 +1168,10 @@ module Mbeditor
966
1168
  candidate.exist? ? ".rubocop.yml" : nil
967
1169
  end
968
1170
 
969
- PROBE_MUTEX = Mutex.new
970
- private_constant :PROBE_MUTEX
1171
+ PROBE_MUTEX = Mutex.new
1172
+ GIT_INFO_MUTEX = Mutex.new
1173
+ FILE_TREE_MUTEX = Mutex.new
1174
+ private_constant :PROBE_MUTEX, :GIT_INFO_MUTEX, :FILE_TREE_MUTEX
971
1175
 
972
1176
  def rubocop_available?
973
1177
  key = Mbeditor.configuration.rubocop_command.to_s
@@ -994,6 +1198,56 @@ module Mbeditor
994
1198
  end
995
1199
  end
996
1200
 
1201
+ def cached_git_info(repo, ttl: 5)
1202
+ GIT_INFO_MUTEX.synchronize do
1203
+ cache = self.class.instance_variable_get(:@git_info_cache) || {}
1204
+ entry = cache[repo]
1205
+ return entry[:data] if entry && (Process.clock_gettime(Process::CLOCK_MONOTONIC) - entry[:ts]) < ttl
1206
+ end
1207
+ nil
1208
+ end
1209
+
1210
+ def store_git_info(repo, data)
1211
+ GIT_INFO_MUTEX.synchronize do
1212
+ cache = self.class.instance_variable_get(:@git_info_cache) || {}
1213
+ cache[repo] = { ts: Process.clock_gettime(Process::CLOCK_MONOTONIC), data: data }
1214
+ self.class.instance_variable_set(:@git_info_cache, cache)
1215
+ end
1216
+ end
1217
+
1218
+ def invalidate_git_info_cache(repo)
1219
+ GIT_INFO_MUTEX.synchronize do
1220
+ cache = self.class.instance_variable_get(:@git_info_cache) || {}
1221
+ cache.delete(repo)
1222
+ self.class.instance_variable_set(:@git_info_cache, cache)
1223
+ end
1224
+ end
1225
+
1226
+ def cached_file_tree(root, ttl: 15)
1227
+ FILE_TREE_MUTEX.synchronize do
1228
+ cache = self.class.instance_variable_get(:@file_tree_cache) || {}
1229
+ entry = cache[root]
1230
+ return entry[:data] if entry && (Process.clock_gettime(Process::CLOCK_MONOTONIC) - entry[:ts]) < ttl
1231
+ end
1232
+ nil
1233
+ end
1234
+
1235
+ def store_file_tree(root, data)
1236
+ FILE_TREE_MUTEX.synchronize do
1237
+ cache = self.class.instance_variable_get(:@file_tree_cache) || {}
1238
+ cache[root] = { ts: Process.clock_gettime(Process::CLOCK_MONOTONIC), data: data }
1239
+ self.class.instance_variable_set(:@file_tree_cache, cache)
1240
+ end
1241
+ end
1242
+
1243
+ def invalidate_file_tree_cache(root)
1244
+ FILE_TREE_MUTEX.synchronize do
1245
+ cache = self.class.instance_variable_get(:@file_tree_cache) || {}
1246
+ cache.delete(root)
1247
+ self.class.instance_variable_set(:@file_tree_cache, cache)
1248
+ end
1249
+ end
1250
+
997
1251
  def probe_cached(ivar, key, &block)
998
1252
  PROBE_MUTEX.synchronize do
999
1253
  cache = self.class.instance_variable_get(ivar) ||