rails_mcp_code_search 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,172 @@
1
+ require "open3"
2
+ require "digest"
3
+
4
module RailsMcpCodeSearch
  # Builds and maintains the chunk index for a git-tracked project.
  # Discovers files via `git ls-files`, parses them into chunks, and embeds
  # chunk contents in batches through the injected embedding adapter.
  class Indexer
    INCLUDE_PATTERNS = %w[**/*.rb **/*.erb **/*.js **/*.ts **/*.yml **/*.yaml **/*.md].freeze
    EXCLUDE_PATTERNS = %w[vendor/ node_modules/ tmp/ log/ .git/].freeze
    BATCH_SIZE = 50
    # Seconds to reuse a cached `git diff` result before shelling out again.
    CHANGED_FILES_CACHE_TTL = 3

    NotAGitRepo = Class.new(StandardError)

    # Per-file indexing errors from the last index_all/index_files run:
    # array of { file:, error: } hashes.
    attr_reader :errors

    # embedding_adapter must respond to #embed(texts) -> array of vectors.
    def initialize(embedding_adapter:, project_path: Dir.pwd, logger: nil)
      @embedding_adapter = embedding_adapter
      @project_path = File.realpath(project_path)
      @logger = logger
      @errors = []
    end

    # Full reindex: process every discoverable file, prune chunks belonging
    # to files that no longer exist, then record reindex metadata.
    def index_all
      @errors = []
      files = discover_files
      return if files.empty?

      process_files(files, prune: true)
      update_metadata
    end

    # Incremental reindex of specific paths. Paths that fail validation
    # (deleted, or resolving outside the project root) have their stale
    # chunks removed instead of silently lingering in the index.
    #
    # NOTE: pruning of chunks for files absent from `file_paths` must NOT
    # happen here — this is a subset of the project, not the whole file set.
    def index_files(file_paths)
      @errors = []
      safe_paths, invalid_paths = file_paths.partition { valid_path?(_1) }
      Chunk.where(file_path: invalid_paths).delete_all if invalid_paths.any?
      process_files(safe_paths)
    end

    # Paths reported modified by `git diff --name-only HEAD`, cached briefly
    # so rapid repeated calls (e.g. once per search) don't shell out each time.
    def changed_files
      @_changed_files_cache ||= {}
      now = Process.clock_gettime(Process::CLOCK_MONOTONIC)

      if @_changed_files_cache[:at] && (now - @_changed_files_cache[:at]) < CHANGED_FILES_CACHE_TTL
        return @_changed_files_cache[:files]
      end

      stdout, _, status = Open3.capture3("git", "diff", "--name-only", "HEAD", chdir: @project_path)
      files = status.success? ? stdout.lines.map(&:strip).select { valid_path?(_1) } : []

      @_changed_files_cache = { files:, at: now }
      files
    end

    # All tracked plus untracked-but-not-ignored files that match
    # INCLUDE_PATTERNS and resolve inside the project root.
    def discover_files
      tracked = git_ls_files
      untracked = git_ls_files("--others", "--exclude-standard")
      (tracked + untracked).uniq.select { include_file?(_1) && valid_path?(_1) }
    end

    private

    def git_ls_files(*args)
      stdout, _stderr, status = Open3.capture3("git", "ls-files", *args, chdir: @project_path)
      raise NotAGitRepo, "Not a git repository: #{@project_path}" unless status.success?

      stdout.lines.map(&:strip)
    end

    def include_file?(path)
      return false if EXCLUDE_PATTERNS.any? { path.start_with?(_1) || path.include?("/#{_1}") }

      INCLUDE_PATTERNS.any? { File.fnmatch?(_1, path, File::FNM_PATHNAME) }
    end

    # True only when the path exists and resolves (symlinks included) to a
    # location inside the project root. The trailing separator prevents a
    # sibling like "/project-evil" from passing a prefix check on "/project".
    def valid_path?(path)
      full_path = File.join(@project_path, path)
      return false unless File.exist?(full_path)

      real = File.realpath(full_path)
      real == @project_path || real.start_with?(@project_path + File::SEPARATOR)
    rescue Errno::ENOENT
      false
    end

    # Parse, chunk, and embed each file, skipping files whose checksum is
    # unchanged. When prune: true (full reindex only), chunks for files
    # absent from `files` are deleted first — never enable this for an
    # incremental subset or the rest of the index would be wiped.
    def process_files(files, prune: false)
      if prune
        deleted = Chunk.distinct.pluck(:file_path) - files
        Chunk.where(file_path: deleted).delete_all if deleted.any?
      end

      chunks_to_embed = []

      files.each do |file_path|
        full_path = File.join(@project_path, file_path)
        source = File.read(full_path, encoding: "utf-8")

        unless source.valid_encoding?
          @errors << { file: file_path, error: "Invalid UTF-8 encoding" }
          next
        end

        file_checksum = Digest::SHA256.hexdigest(source)

        # Skip unchanged files — chunks store the whole-file checksum,
        # so any chunk of the file carries the comparison value.
        existing = Chunk.where(file_path:).first
        next if existing && existing.checksum == file_checksum

        # Replace any old chunks for this file.
        Chunk.where(file_path:).delete_all

        parsed = parse_file(file_path, source)
        next if parsed.empty?

        parsed.each do |result|
          chunk = Chunk.create!(
            file_path:,
            line_start: result.line_start,
            line_end: result.line_end,
            chunk_type: result.chunk_type,
            qualified_name: result.qualified_name,
            content: result.content,
            checksum: file_checksum
          )
          chunks_to_embed << chunk
        end

        # Flush embeddings in batches to bound memory and API payload size.
        if chunks_to_embed.size >= BATCH_SIZE
          embed_batch(chunks_to_embed)
          chunks_to_embed = []
        end
      rescue => e
        @errors << { file: file_path, error: e.message }
        log(:warn, "Error indexing #{file_path}: #{e.message}")
      end

      embed_batch(chunks_to_embed) if chunks_to_embed.any?
    end

    # Route a file to the parser matching its extension.
    def parse_file(file_path, source)
      if file_path.end_with?(".rb")
        RubyParser.parse(source, file_path:)
      elsif file_path.end_with?(".erb")
        ErbParser.parse(source, file_path:)
      else
        SlidingWindowParser.parse(source, file_path:)
      end
    end

    # Embed a batch of chunk contents and persist each vector. Failures are
    # recorded rather than raised so one bad batch doesn't abort the run.
    def embed_batch(chunks)
      return if chunks.empty?

      vectors = @embedding_adapter.embed(chunks.map(&:content))
      chunks.each_with_index { |chunk, i| chunk.update!(embedding: vectors[i]) }
      GC.start
    rescue => e
      @errors << { file: "batch_embed", error: e.message }
      log(:warn, "Embedding batch failed: #{e.message}")
    end

    def update_metadata
      Database::Metadata.set "last_reindex_at", Time.now.iso8601
      Database::Metadata.set "embedding_provider", @embedding_adapter.class.name.split("::").last
      Database::Metadata.set "embedding_dimensions", @embedding_adapter.dimensions
    end

    def log(level, message)
      @logger&.send(level, message)
    end
  end
end
@@ -0,0 +1,104 @@
1
+ require "prism"
2
+
3
module RailsMcpCodeSearch
  # Parses Ruby source into semantic chunks (classes, modules, methods)
  # using Prism. Files that fail to parse fall back to fixed-size windows
  # so they are still searchable.
  class RubyParser
    ParseError = Class.new(StandardError)
    # One indexed chunk: source text plus its 1-based line span and identity.
    Result = Data.define(:content, :line_start, :line_end, :chunk_type, :qualified_name)

    def self.parse(source, file_path: nil)
      new.parse(source, file_path:)
    end

    # Returns an Array<Result>. Syntactically invalid Ruby falls back to
    # SlidingWindowParser instead of raising to the caller.
    def parse(source, file_path: nil)
      result = Prism.parse(source)
      raise ParseError, result.errors.map(&:message).join(", ") unless result.success?

      visitor = Visitor.new(source)
      visitor.visit(result.value)
      visitor.chunks
    rescue ParseError
      SlidingWindowParser.parse(source, file_path:)
    end

    # Walks the Prism AST collecting a Result per class, module, and method.
    class Visitor < Prism::Visitor
      attr_reader :chunks

      def initialize(source)
        super()
        @source = source
        @lines = source.lines
        @scope_stack = []
        @singleton_depth = 0 # > 0 while inside `class << self`
        @chunks = []
      end

      def visit_class_node(node)
        visit_container(node, "class")
      end

      def visit_module_node(node)
        visit_container(node, "module")
      end

      def visit_def_node(node)
        qualified = build_qualified_name(node.name.to_s, instance_method: true)
        add_chunk(node, "method", qualified)
      end

      # Methods inside `class << self` are class methods. A nesting counter
      # (rather than a boolean, which a nested singleton would clear too
      # early) tracks this, and `ensure` restores it even if a child visit
      # raises so visitor state cannot be corrupted.
      def visit_singleton_class_node(node)
        @singleton_depth += 1
        super
      ensure
        @singleton_depth -= 1
      end

      private

      # Record the container's own chunk, then visit its children with the
      # container name pushed onto the scope stack; `ensure` guarantees the
      # pop even when a child visit raises.
      def visit_container(node, type)
        @scope_stack.push(constant_name(node.constant_path))
        add_chunk(node, type, @scope_stack.join("::"))
        visit_child_nodes(node)
      ensure
        @scope_stack.pop
      end

      def add_chunk(node, type, qualified_name)
        line_start = node.location.start_line
        line_end = node.location.end_line
        content = @lines[(line_start - 1)..(line_end - 1)].join
        @chunks << Result.new(content:, line_start:, line_end:, chunk_type: type, qualified_name:)
      end

      # "Scope#name" for instance methods, "Scope.name" for singleton-class
      # methods; bare method name at top level.
      def build_qualified_name(method_name, instance_method: true)
        prefix = @scope_stack.join("::")
        separator = (@singleton_depth > 0 ? "." : (instance_method ? "#" : "."))
        prefix.empty? ? method_name : "#{prefix}#{separator}#{method_name}"
      end

      # Flatten a constant path node ("A::B::C") to its string form.
      def constant_name(node)
        case node
        when Prism::ConstantReadNode
          node.name.to_s
        when Prism::ConstantPathNode
          parts = []
          current = node
          while current.is_a?(Prism::ConstantPathNode)
            parts.unshift(current.name.to_s)
            current = current.parent
          end
          parts.unshift(current.name.to_s) if current.is_a?(Prism::ConstantReadNode)
          parts.join("::")
        else
          node.to_s
        end
      end
    end
  end
end
@@ -0,0 +1,63 @@
1
+ require "logger"
2
+
3
module RailsMcpCodeSearch
  # Wires together the database, embedding adapter, indexer, and background
  # worker, and installs process shutdown hooks.
  class Runtime
    attr_reader :db_path, :embedding_adapter, :indexer, :worker, :logger, :project_path

    def self.boot(project_path: Dir.pwd, db_path: nil)
      new(project_path:, db_path:).tap(&:boot)
    end

    def initialize(project_path: Dir.pwd, db_path: nil)
      @project_path = project_path
      @db_path = db_path
      @shut_down = false
      @logger = Logger.new($stderr, level: log_level)
      @logger.formatter = proc { |severity, _time, _progname, msg| "[rails-mcp-code-search] #{severity}: #{msg}\n" }
    end

    # Set up storage and adapter, start the background worker, and install
    # shutdown hooks. Returns nothing meaningful; accessors expose the parts.
    def boot
      @db_path = Database.setup(project_path: @project_path, db_path: @db_path)
      @embedding_adapter = build_adapter
      check_dimension_mismatch
      @indexer = Indexer.new(embedding_adapter: @embedding_adapter, project_path: @project_path, logger: @logger)
      @worker = BackgroundWorker.new(indexer: @indexer, logger: @logger)
      @worker.start
      setup_shutdown_hooks
      @logger.info "Booted for #{@project_path}"
    end

    # Idempotent: signal traps and at_exit both funnel here, so guard
    # against stopping the worker (and logging) twice.
    def shutdown
      return if @shut_down

      @shut_down = true
      @worker&.stop
      @logger.info "Shut down"
    end

    private

    # Adapter selection via env var; anything unrecognized falls back to local.
    def build_adapter
      case ENV.fetch("RAILS_MCP_CODE_SEARCH_PROVIDER", "local")
      when "openai" then Embeddings::OpenaiAdapter.new
      else Embeddings::LocalAdapter.new
      end
    end

    # If the stored embedding dimensions differ from the active adapter's,
    # the existing vectors are unusable: wipe chunks and record the new size.
    def check_dimension_mismatch
      stored = Database::Metadata.get("embedding_dimensions")&.to_i
      return unless stored
      return if stored == @embedding_adapter.dimensions

      @logger.warn "Dimension mismatch (stored: #{stored}, active: #{@embedding_adapter.dimensions}). Triggering full reindex."
      Chunk.delete_all
      Database::Metadata.set "embedding_dimensions", @embedding_adapter.dimensions
    end

    # Traps only call exit: running Logger (which takes a mutex) inside a
    # signal handler can deadlock, so the real cleanup happens in the
    # at_exit hook, which runs in normal (non-trap) context.
    def setup_shutdown_hooks
      at_exit { shutdown }
      trap("INT") { exit }
      trap("TERM") { exit }
    end

    def log_level
      ENV.fetch("RAILS_MCP_CODE_SEARCH_LOG_LEVEL", "info")
    end
  end
end
@@ -0,0 +1,25 @@
1
+ require "mcp"
2
+
3
module RailsMcpCodeSearch
  # Entry point: boots a Runtime for the project and serves the MCP tool
  # set over a stdio transport (blocks until the transport closes).
  class Server
    TOOLS = [
      Tools::ReindexTool,
      Tools::SearchTool,
      Tools::StatusTool
    ].freeze

    # Boot and serve. The runtime is handed to every tool invocation via
    # server_context.
    def self.start(project_path: Dir.pwd, db_path: nil)
      booted_runtime = Runtime.boot(project_path:, db_path:)

      mcp_server = ::MCP::Server.new(
        name: "rails-mcp-code-search",
        version: VERSION,
        tools: TOOLS,
        server_context: { runtime: booted_runtime }
      )

      ::MCP::Server::Transports::StdioTransport.new(mcp_server).open
    end
  end
end
@@ -0,0 +1,39 @@
1
module RailsMcpCodeSearch
  # Fallback chunker: slices a file into fixed-size, overlapping windows of
  # lines. Used for non-Ruby files and for Ruby files that fail to parse.
  class SlidingWindowParser
    WINDOW_SIZE = 50
    OVERLAP = 10
    MAX_CHUNKS = 200 # hard cap so one huge file can't flood the index

    Result = RubyParser::Result

    def self.parse(source, file_path: nil)
      new.parse(source, file_path:)
    end

    # Returns Array<Result> of "window" chunks with 1-based line spans.
    # Consecutive windows overlap by OVERLAP lines so content near a window
    # boundary still appears whole in at least one chunk.
    def parse(source, file_path: nil)
      lines = source.lines
      return [] if lines.empty?

      chunks = []
      step = WINDOW_SIZE - OVERLAP
      offset = 0

      while offset < lines.size && chunks.size < MAX_CHUNKS
        window_end = [ offset + WINDOW_SIZE, lines.size ].min

        chunks << Result.new(
          content: lines[offset...window_end].join,
          line_start: offset + 1,
          line_end: window_end,
          chunk_type: "window",
          qualified_name: nil
        )

        # Stop once a window reaches EOF; stepping again would only emit a
        # shorter trailing chunk fully contained in this one.
        break if window_end == lines.size

        offset += step
      end

      chunks
    end
  end
end
@@ -0,0 +1,26 @@
1
+ require "mcp"
2
+ require "json"
3
+
4
module RailsMcpCodeSearch
  module Tools
    # Shared private helpers for the MCP tool classes: runtime lookup from
    # the server context, plus uniform text/error response construction.
    class BaseTool < ::MCP::Tool
      class << self
        private

        # The Runtime instance placed into server_context at boot.
        def runtime_for(server_context:)
          server_context[:runtime]
        end

        # Wrap a string — or any JSON-serializable object — as a text response.
        def text_response(data)
          body = data.is_a?(String) ? data : JSON.generate(data)
          ::MCP::Tool::Response.new([ { type: "text", text: body } ])
        end

        # Structured error payload; nil optional fields are dropped.
        def error_response(error:, message:, recoverable: false, suggested_action: nil)
          payload = { error:, message:, recoverable:, suggested_action: }.compact
          ::MCP::Tool::Response.new([ { type: "text", text: JSON.generate(payload) } ], error: true)
        end
      end
    end
  end
end
@@ -0,0 +1,51 @@
1
module RailsMcpCodeSearch
  module Tools
    # MCP tool: kicks off a background reindex (full or incremental) and
    # returns immediately; progress is visible via the status tool.
    class ReindexTool < BaseTool
      tool_name "reindex"
      description "Trigger codebase reindex. Use full=true to rebuild the entire index. " \
                  "Returns immediately — use the status tool to check progress."

      input_schema(
        properties: {
          full: { type: "boolean", description: "Full reindex (default: incremental)" }
        }
      )

      annotations(
        title: "Reindex Code",
        read_only_hint: false,
        destructive_hint: false,
        idempotent_hint: true,
        open_world_hint: false
      )

      def self.call(server_context:, full: nil)
        runtime = runtime_for(server_context:)
        full = full == true # anything but literal true means incremental

        if full
          runtime.worker.enqueue(:full_index)
          runtime.worker.increment_reindex_count
          # Best-effort estimate only: discovery failure (e.g. not a git
          # repo) must not fail the enqueue that already happened.
          estimated = begin
            runtime.indexer.discover_files.size
          rescue StandardError
            0
          end

          text_response({
            status: "reindex_started",
            mode: "full",
            estimated_files: estimated
          })
        else
          changed = runtime.indexer.changed_files
          if changed.empty?
            text_response({ status: "no_changes", mode: "incremental", changed_files: 0 })
          else
            runtime.worker.enqueue(:index_files, payload: changed)
            runtime.worker.increment_reindex_count
            text_response({ status: "reindex_started", mode: "incremental", changed_files: changed.size })
          end
        end
      rescue => e
        error_response(error: "reindex_error", message: e.message, recoverable: true)
      end
    end
  end
end
@@ -0,0 +1,128 @@
1
module RailsMcpCodeSearch
  module Tools
    # MCP tool: semantic (embedding-based) code search over indexed chunks.
    class SearchTool < BaseTool
      tool_name "search"
      description "Search the codebase using semantic similarity. Use this when you need to find " \
                  "code by concept or behavior (e.g., 'authentication logic', 'payment processing') " \
                  "rather than by exact identifier. For exact string matches, prefer Grep. " \
                  "Returns code chunks ranked by cosine similarity. " \
                  "Scores above 0.7 are typically strong matches, 0.5-0.7 are partial matches."

      input_schema(
        properties: {
          query: { type: "string", description: "Search query (natural language or code)" },
          limit: { type: "integer", description: "Max results (default 10)" },
          file_pattern: { type: "string", description: "Glob pattern to filter results by file path (e.g. 'app/models/**/*.rb'). Applied after similarity search." }
        },
        required: %w[query]
      )

      annotations(
        title: "Search Code",
        read_only_hint: true,
        destructive_hint: false,
        idempotent_hint: true,
        open_world_hint: false
      )

      def self.call(query:, server_context:, limit: nil, file_pattern: nil)
        runtime = runtime_for(server_context:)
        limit = (limit || 10).clamp(1, 50)

        # An empty index is either still building or never built — distinguish
        # so the caller knows whether to wait or to trigger a reindex.
        if Chunk.count == 0
          worker_state = runtime.worker.state
          if worker_state == :indexing
            return error_response(error: "indexing_in_progress", message: "Index is still building. Try again in a moment.", recoverable: true, suggested_action: "status")
          else
            return error_response(error: "index_empty", message: "No files indexed yet. Call reindex first.", recoverable: true, suggested_action: "reindex")
          end
        end

        # Smart reindex: enqueue changed files and wait briefly.
        trigger_smart_reindex(runtime)

        # Generate the query embedding via the same adapter used at indexing.
        query_vector = runtime.embedding_adapter.embed([ query ]).first

        # KNN search — over-fetch when filtering by file pattern, since the
        # filter discards matches after ranking.
        fetch_limit = file_pattern ? limit * 5 : limit
        raw_results = Chunk.nearest_neighbors(:embedding, query_vector, distance: "cosine").first(fetch_limit)

        # Filter by file pattern (post-similarity).
        filtered_out = 0
        if file_pattern
          before_count = raw_results.size
          raw_results = raw_results.select { File.fnmatch?(file_pattern, _1.file_path, File::FNM_PATHNAME) }
          filtered_out = before_count - raw_results.size
        end

        # Drop lower-ranked chunks whose line span overlaps a kept chunk
        # in the same file, then trim to the requested limit.
        results = dedup_overlapping(raw_results).first(limit)

        # Track usage metrics asynchronously.
        runtime.worker.enqueue_hit_counts(results.map(&:id))
        runtime.worker.increment_search_count

        stale = runtime.worker.state == :indexing

        text_response({
          results: results.map { format_result(_1) },
          metadata: {
            query:,
            limit:,
            count: results.size,
            has_more: raw_results.size > limit,
            index_state: runtime.worker.state.to_s,
            index_completeness: runtime.worker.state == :idle ? 1.0 : runtime.worker.progress,
            results_may_be_stale: stale,
            total_indexed_chunks: Chunk.count,
            filtered_out_count: filtered_out
          }
        })
      rescue => e
        error_response(error: "search_error", message: e.message, recoverable: true)
      end

      class << self
        private

        # If git reports changed files, enqueue them and give the worker a
        # brief window to finish so results reflect very recent edits.
        def trigger_smart_reindex(runtime)
          changed = runtime.indexer.changed_files
          return if changed.empty?

          runtime.worker.enqueue(:index_files, payload: changed)
          runtime.worker.wait_for_reindex(timeout: 0.2)
        end

        # Keep the first (highest-ranked) chunk of any overlapping line span
        # per file; later chunks intersecting a kept chunk's span are dropped.
        # (Rewritten from a reject/tap construct that double-inserted the
        # first chunk per file into its bookkeeping list.)
        def dedup_overlapping(results)
          kept_by_file = Hash.new { |h, k| h[k] = [] }

          results.select do |result|
            overlapping = kept_by_file[result.file_path].any? do |kept|
              result.line_start <= kept.line_end && result.line_end >= kept.line_start
            end
            kept_by_file[result.file_path] << result unless overlapping
            !overlapping
          end
        end

        # Shape one chunk for the response; similarity = 1 - cosine distance.
        def format_result(chunk)
          {
            file_path: chunk.file_path,
            line_start: chunk.line_start,
            line_end: chunk.line_end,
            chunk_type: chunk.chunk_type,
            qualified_name: chunk.qualified_name,
            content: chunk.content,
            similarity: (1.0 - chunk.neighbor_distance).round(4)
          }
        end
      end
    end
  end
end
@@ -0,0 +1,64 @@
1
module RailsMcpCodeSearch
  module Tools
    # MCP tool: reports index health, size, provider info, and usage stats.
    class StatusTool < BaseTool
      tool_name "status"
      description "Show index health and readiness. Use to check if indexing is complete " \
                  "before searching, or to diagnose issues."

      input_schema(properties: {})

      annotations(
        title: "Index Status",
        read_only_hint: true,
        destructive_hint: false,
        idempotent_hint: true,
        open_world_hint: false
      )

      def self.call(server_context:)
        runtime = runtime_for(server_context:)
        worker = runtime.worker

        chunk_count = Chunk.count
        file_count = Chunk.distinct.pluck(:file_path).size

        # Worker state takes precedence over emptiness: an empty index that
        # is actively building reports "indexing", not "empty".
        state = if worker.state == :error
          "error"
        elsif worker.state == :indexing
          "indexing"
        elsif chunk_count == 0
          "empty"
        else
          "ready"
        end

        # Best-effort: a missing file or unset db_path just reports 0 bytes.
        db_size = begin
          File.size(runtime.db_path)
        rescue StandardError
          0
        end

        top_chunks = Chunk.where("hit_count > 0").order(hit_count: :desc).limit(5).map do |c|
          { file_path: c.file_path, qualified_name: c.qualified_name, hit_count: c.hit_count }
        end

        text_response({
          state:,
          chunk_count:,
          file_count:,
          db_size_bytes: db_size,
          index_completeness: worker.state == :idle ? 1.0 : worker.progress,
          embedding_provider: runtime.embedding_adapter.class.name.split("::").last.sub("Adapter", "").downcase,
          embedding_dimensions: runtime.embedding_adapter.dimensions,
          project_path: runtime.project_path,
          indexing_errors: worker.errors.first(10),
          stats: {
            total_searches: Database::Metadata.get("total_searches").to_i,
            total_reindexes: Database::Metadata.get("total_reindexes").to_i,
            last_search_at: Database::Metadata.get("last_search_at"),
            last_reindex_at: Database::Metadata.get("last_reindex_at")
          },
          top_chunks_by_hits: top_chunks
        })
      rescue => e
        error_response(error: "status_error", message: e.message, recoverable: true)
      end
    end
  end
end
@@ -0,0 +1,3 @@
1
module RailsMcpCodeSearch
  # Gem version string, read from the environment at load time with a dev
  # fallback. NOTE(review): the registry header says 0.1.0 while the default
  # here is "0.0.0.dev" — confirm release tooling sets this variable.
  VERSION = ENV.fetch("RAILS_MCP_CODE_SEARCH_VERSION", "0.0.0.dev")
end